/*
 * Copyright 2007,2008,2009,2010 Red Hat, Inc.
 * Copyright 2010,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"
#include "hb-set-private.hh"


namespace OT {


#define TRACE_DISPATCH(this) \
  hb_auto_trace_t<context_t::max_debug_depth, typename context_t::return_t> trace \
  (&c->debug_depth, c->get_name (), this, HB_FUNC, \
   "");


#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

#define TRACE_CLOSURE(this) \
  hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
  (&c->debug_depth, c->get_name (), this, HB_FUNC, \
   "");

struct hb_closure_context_t
{
  inline const char *get_name (void) { return "CLOSURE"; }
  static const unsigned int max_debug_depth = HB_DEBUG_CLOSURE;
  typedef hb_void_t return_t;
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  recurse_func (NULL),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif

#define TRACE_WOULD_APPLY(this) \
  hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
  (&c->debug_depth, c->get_name (), this, HB_FUNC, \
   "%d glyphs", c->len);

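/* hb_would_apply_context_t checks whether a lookup would match a given glyph
 * sequence, without touching a buffer; essentially a dry run of the matching
 * logic, used to test a substitution before committing to it. */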
"WOULD_APPLY"; } 109 static const unsigned int max_debug_depth = HB_DEBUG_WOULD_APPLY; 110 typedef bool return_t; 111 template <typename T> 112 inline return_t dispatch (const T &obj) { return obj.would_apply (this); } 113 static return_t default_return_value (void) { return false; } 114 bool stop_sublookup_iteration (return_t r) const { return r; } 115 116 hb_face_t *face; 117 const hb_codepoint_t *glyphs; 118 unsigned int len; 119 bool zero_context; 120 unsigned int debug_depth; 121 122 hb_would_apply_context_t (hb_face_t *face_, 123 const hb_codepoint_t *glyphs_, 124 unsigned int len_, 125 bool zero_context_) : 126 face (face_), 127 glyphs (glyphs_), 128 len (len_), 129 zero_context (zero_context_), 130 debug_depth (0) {} 131 }; 132 133 134 135 #ifndef HB_DEBUG_COLLECT_GLYPHS 136 #define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0) 137 #endif 138 139 #define TRACE_COLLECT_GLYPHS(this) \ 140 hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \ 141 (&c->debug_depth, c->get_name (), this, HB_FUNC, \ 142 ""); 143 144 struct hb_collect_glyphs_context_t 145 { 146 inline const char *get_name (void) { return "COLLECT_GLYPHS"; } 147 static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS; 148 typedef hb_void_t return_t; 149 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index); 150 template <typename T> 151 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; } 152 static return_t default_return_value (void) { return HB_VOID; } 153 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } 154 return_t recurse (unsigned int lookup_index) 155 { 156 if (unlikely (nesting_level_left == 0 || !recurse_func)) 157 return default_return_value (); 158 159 /* Note that GPOS sets recurse_func to NULL already, so it doesn't get 160 * past the previous check. For GSUB, we only want to collect the output 161 * glyphs in the recursion. If output is not requested, we can go home now. */ 162 163 if (output == hb_set_get_empty ()) 164 return HB_VOID; 165 166 hb_set_t *old_before = before; 167 hb_set_t *old_input = input; 168 hb_set_t *old_after = after; 169 before = input = after = hb_set_get_empty (); 170 171 nesting_level_left--; 172 recurse_func (this, lookup_index); 173 nesting_level_left++; 174 175 before = old_before; 176 input = old_input; 177 after = old_after; 178 179 return HB_VOID; 180 } 181 182 hb_face_t *face; 183 hb_set_t *before; 184 hb_set_t *input; 185 hb_set_t *after; 186 hb_set_t *output; 187 recurse_func_t recurse_func; 188 unsigned int nesting_level_left; 189 unsigned int debug_depth; 190 191 hb_collect_glyphs_context_t (hb_face_t *face_, 192 hb_set_t *glyphs_before, /* OUT. May be NULL */ 193 hb_set_t *glyphs_input, /* OUT. May be NULL */ 194 hb_set_t *glyphs_after, /* OUT. May be NULL */ 195 hb_set_t *glyphs_output, /* OUT. May be NULL */ 196 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) : 197 face (face_), 198 before (glyphs_before ? glyphs_before : hb_set_get_empty ()), 199 input (glyphs_input ? glyphs_input : hb_set_get_empty ()), 200 after (glyphs_after ? glyphs_after : hb_set_get_empty ()), 201 output (glyphs_output ? 
struct hb_collect_glyphs_context_t
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS;
  typedef hb_void_t return_t;
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now. */

    if (output == hb_set_get_empty ())
      return HB_VOID;

    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t *glyphs_before, /* OUT. May be NULL */
			       hb_set_t *glyphs_input,  /* OUT. May be NULL */
			       hb_set_t *glyphs_after,  /* OUT. May be NULL */
			       hb_set_t *glyphs_output, /* OUT. May be NULL */
			       unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input  (glyphs_input  ? glyphs_input  : hb_set_get_empty ()),
			      after  (glyphs_after  ? glyphs_after  : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (NULL),
			      nesting_level_left (nesting_level_left_),
			      debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



struct hb_get_coverage_context_t
{
  inline const char *get_name (void) { return "GET_COVERAGE"; }
  static const unsigned int max_debug_depth = 0;
  typedef const Coverage &return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value (void) { return Null(Coverage); }

  hb_get_coverage_context_t (void) :
			    debug_depth (0) {}

  unsigned int debug_depth;
};



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

#define TRACE_APPLY(this) \
  hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
  (&c->debug_depth, c->get_name (), this, HB_FUNC, \
   "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);

struct hb_apply_context_t
{
  inline const char *get_name (void) { return "APPLY"; }
  static const unsigned int max_debug_depth = HB_DEBUG_APPLY;
  typedef bool return_t;
  typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value (void) { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, lookup_index);
    nesting_level_left++;
    return ret;
  }

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  bool auto_zwj;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  const GDEF &gdef;
  bool has_glyph_classes;
  unsigned int debug_depth;


  hb_apply_context_t (unsigned int table_index_,
		      hb_font_t *font_,
		      hb_buffer_t *buffer_,
		      hb_mask_t lookup_mask_,
		      bool auto_zwj_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (lookup_mask_),
			auto_zwj (auto_zwj_),
			recurse_func (NULL),
			nesting_level_left (MAX_NESTING_LEVEL),
			lookup_props (0),
			gdef (*hb_ot_layout_from_face (face)->gdef),
			has_glyph_classes (gdef.has_glyph_classes ()),
			debug_depth (0) {}

  inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
  inline void set_lookup (const Lookup &l) { lookup_props = l.get_props (); }

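  /* matcher_t decides, for a single glyph, whether it should be skipped
   * (according to lookup_props and default-ignorable handling) and whether it
   * matches the current match_func / mask / syllable constraints. */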
  struct matcher_t
  {
    inline matcher_t (void) :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
	     syllable arg1(0),
#undef arg1
	     match_func (NULL),
	     match_data (NULL) {};

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

    inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    inline void set_mask (hb_mask_t mask_) { mask = mask_; }
    inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    inline void set_match_func (match_func_t match_func_,
				const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

    inline may_match_t may_match (const hb_glyph_info_t &info,
				  const USHORT *glyph_data) const
    {
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
	      const hb_glyph_info_t    &info) const
    {
      unsigned int property;

      property = info.glyph_props();

      if (!c->match_properties (info.codepoint, property, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
		    !is_a_ligature (info)))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

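  /* The skipping iterators below walk the buffer forward (over info[]) or
   * backward (over out_info[]), transparently skipping glyphs the matcher says
   * to ignore.  num_items is the number of matches still expected. */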
  struct skipping_forward_iterator_t
  {
    inline skipping_forward_iterator_t (hb_apply_context_t *c_,
					unsigned int start_index_,
					unsigned int num_items_,
					bool context_match = false) :
					  idx (start_index_),
					  c (c_),
					  match_glyph_data (NULL),
					  num_items (num_items_),
					  end (c->buffer->len)
    {
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (context_match || c->table_index == 1);
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
      if (!context_match)
	matcher.set_mask (c->lookup_mask);
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }
    inline void set_lookup_props (unsigned int lookup_props) { matcher.set_lookup_props (lookup_props); }
    inline void set_syllable (unsigned int syllable) { matcher.set_syllable (syllable); }
    inline void set_match_func (matcher_t::match_func_t match_func,
				const void *match_data,
				const USHORT glyph_data[])
    {
      matcher.set_match_func (match_func, match_data);
      match_glyph_data = glyph_data;
    }

    inline bool has_no_chance (void) const { return unlikely (num_items && idx + num_items >= end); }
    inline void reject (void) { num_items++; match_glyph_data--; }
    inline bool next (void)
    {
      assert (num_items > 0);
      while (!has_no_chance ())
      {
	idx++;
	const hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }

    unsigned int idx;
    protected:
    hb_apply_context_t *c;
    matcher_t matcher;
    const USHORT *match_glyph_data;

    unsigned int num_items;
    unsigned int end;
  };

  struct skipping_backward_iterator_t
  {
    inline skipping_backward_iterator_t (hb_apply_context_t *c_,
					 unsigned int start_index_,
					 unsigned int num_items_,
					 bool context_match = false) :
					   idx (start_index_),
					   c (c_),
					   match_glyph_data (NULL),
					   num_items (num_items_)
    {
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (context_match || c->table_index == 1);
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
      if (!context_match)
	matcher.set_mask (c->lookup_mask);
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }
    inline void set_lookup_props (unsigned int lookup_props) { matcher.set_lookup_props (lookup_props); }
    inline void set_syllable (unsigned int syllable) { matcher.set_syllable (syllable); }
    inline void set_match_func (matcher_t::match_func_t match_func,
				const void *match_data,
				const USHORT glyph_data[])
    {
      matcher.set_match_func (match_func, match_data);
      match_glyph_data = glyph_data;
    }

    inline bool has_no_chance (void) const { return unlikely (idx < num_items); }
    inline void reject (void) { num_items++; }
    inline bool prev (void)
    {
      assert (num_items > 0);
      while (!has_no_chance ())
      {
	idx--;
	const hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);

	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }

    unsigned int idx;
    protected:
    hb_apply_context_t *c;
    matcher_t matcher;
    const USHORT *match_glyph_data;

    unsigned int num_items;
  };

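  /* Glyph-property filtering: match_properties_mark() handles the
   * UseMarkFilteringSet and MarkAttachmentType parts of lookup_props, while
   * match_properties() first applies the LookupFlag::IgnoreFlags bits. */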
  inline bool
  match_properties_mark (hb_codepoint_t glyph,
			 unsigned int   glyph_props,
			 unsigned int   lookup_props) const
  {
    /* If using mark filtering sets, the high short of
     * lookup_props has the set index.
     */
    if (lookup_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (lookup_props >> 16, glyph);

    /* The second byte of lookup_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (lookup_props & LookupFlag::MarkAttachmentType)
      return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  inline bool
  match_properties (hb_codepoint_t glyph,
		    unsigned int   glyph_props,
		    unsigned int   lookup_props) const
  {
    /* Not covered, if, for example, glyph class is ligature and
     * lookup_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, lookup_props);

    return true;
  }

  inline bool
  check_glyph_property (hb_glyph_info_t *info,
			unsigned int     lookup_props) const
  {
    unsigned int property;

    property = info->glyph_props();

    return match_properties (info->codepoint, property, lookup_props);
  }

  inline void set_class (hb_codepoint_t glyph_index, unsigned int class_guess) const
  {
    if (likely (has_glyph_classes))
      buffer->cur().glyph_props() = gdef.get_glyph_props (glyph_index);
    else if (class_guess)
      buffer->cur().glyph_props() = class_guess;
  }

  inline void output_glyph (hb_codepoint_t glyph_index,
			    unsigned int class_guess = 0) const
  {
    set_class (glyph_index, class_guess);
    buffer->output_glyph (glyph_index);
  }
  inline void replace_glyph (hb_codepoint_t glyph_index,
			     unsigned int class_guess = 0) const
  {
    set_class (glyph_index, class_guess);
    buffer->replace_glyph (glyph_index);
  }
  inline void replace_glyph_inplace (hb_codepoint_t glyph_index,
				     unsigned int class_guess = 0) const
  {
    set_class (glyph_index, class_guess);
    buffer->cur().codepoint = glyph_index;
  }
};


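/* The function-pointer typedefs below let Context and ChainContext share one
 * implementation for their three matching flavors: per-glyph, per-class
 * (through a ClassDef) and per-Coverage. */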
typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};


static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  return glyphs->has (value);
}
static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.intersects_class (glyphs, value);
}
static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).intersects (glyphs);
}

static inline bool intersects_array (hb_closure_context_t *c,
				     unsigned int count,
				     const USHORT values[],
				     intersects_func_t intersects_func,
				     const void *intersects_data)
{
  for (unsigned int i = 0; i < count; i++)
    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
      return false;
  return true;
}


static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.add_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  (data+coverage).add_coverage (glyphs);
}
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
				  hb_set_t *glyphs,
				  unsigned int count,
				  const USHORT values[],
				  collect_glyphs_func_t collect_func,
				  const void *collect_data)
{
  for (unsigned int i = 0; i < count; i++)
    collect_func (glyphs, values[i], collect_data);
}


static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}

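/* would_match_input() checks a rule's input sequence against c->glyphs with no
 * glyph skipping; match_input() does the real match against the buffer through
 * the skipping iterator, and also tracks ligature ids/components for the caller. */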
static inline bool would_match_input (hb_would_apply_context_t *c,
				      unsigned int count, /* Including the first glyph (not matched) */
				      const USHORT input[], /* Array of input values--start with second glyph */
				      match_func_t match_func,
				      const void *match_data)
{
  if (count != c->len)
    return false;

  for (unsigned int i = 1; i < count; i++)
    if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
      return false;

  return true;
}
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset = NULL,
				bool *p_is_mark_ligature = NULL,
				unsigned int *p_total_component_count = NULL)
{
  TRACE_APPLY (NULL);

  hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);
  if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There is an exception to this: If a ligature tries ligating with marks that
   *   belong to it itself, go ahead, assuming that the font designer knows what
   *   they are doing (otherwise it can break Indic stuff when a matra wants to
   *   ligate with a conjunct...)
   */

  bool is_mark_ligature = !!(c->buffer->cur().glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK);

  unsigned int total_component_count = 0;
  total_component_count += get_lig_num_comps (c->buffer->cur());

  unsigned int first_lig_id = get_lig_id (c->buffer->cur());
  unsigned int first_lig_comp = get_lig_comp (c->buffer->cur());

  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    unsigned int this_lig_id = get_lig_id (c->buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = get_lig_comp (c->buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp) {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them. */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
	return TRACE_RETURN (false);
    } else {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return TRACE_RETURN (false);
    }

    is_mark_ligature = is_mark_ligature && (c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK);
    total_component_count += get_lig_num_comps (c->buffer->info[skippy_iter.idx]);
  }

  if (end_offset)
    *end_offset = skippy_iter.idx - c->buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return TRACE_RETURN (true);
}
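/* ligate_input() replaces the sequence just matched by match_input() with
 * lig_glyph, and re-numbers ligature components on marks inside and after the
 * matched sequence; see the long comment below for the details. */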
static inline void ligate_input (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph (not matched) */
				 const USHORT input[], /* Array of input values--start with second glyph */
				 match_func_t match_func,
				 const void *match_data,
				 hb_codepoint_t lig_glyph,
				 bool is_mark_ligature,
				 unsigned int total_component_count)
{
  hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);
  if (skippy_iter.has_no_chance ()) return;

  /*
   * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
  unsigned int lig_id = is_mark_ligature ? 0 : allocate_lig_id (c->buffer);
  unsigned int last_lig_id = get_lig_id (c->buffer->cur());
  unsigned int last_num_components = get_lig_num_comps (c->buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (!is_mark_ligature)
    set_lig_props_for_ligature (c->buffer->cur(), lig_id, total_component_count);
  c->replace_glyph (lig_glyph, klass);

  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return;

    while (c->buffer->idx < skippy_iter.idx)
    {
      if (!is_mark_ligature) {
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (MAX (get_lig_comp (c->buffer->cur()), 1u), last_num_components);
	set_lig_props_for_mark (c->buffer->cur(), lig_id, new_lig_comp);
      }
      c->buffer->next_glyph ();
    }

    last_lig_id = get_lig_id (c->buffer->cur());
    last_num_components = get_lig_num_comps (c->buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    c->buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id) {
    /* Re-adjust components for any marks following. */
    for (unsigned int i = c->buffer->idx; i < c->buffer->len; i++) {
      if (last_lig_id == get_lig_id (c->buffer->info[i])) {
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (MAX (get_lig_comp (c->buffer->info[i]), 1u), last_num_components);
	set_lig_props_for_mark (c->buffer->info[i], lig_id, new_lig_comp);
      } else
	break;
    }
  }
}
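/* Backtrack is matched backwards over what has already been output; lookahead
 * is matched forward starting right after the matched input (the offset
 * returned by match_input()). */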
static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  TRACE_APPLY (NULL);

  hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
  skippy_iter.set_match_func (match_func, match_data, backtrack);
  if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

  for (unsigned int i = 0; i < count; i++)
    if (!skippy_iter.prev ())
      return TRACE_RETURN (false);

  return TRACE_RETURN (true);
}

static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  TRACE_APPLY (NULL);

  hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
  skippy_iter.set_match_func (match_func, match_data, lookahead);
  if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

  for (unsigned int i = 0; i < count; i++)
    if (!skippy_iter.next ())
      return TRACE_RETURN (false);

  return TRACE_RETURN (true);
}


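/* A LookupRecord tells apply_lookup() which nested lookup to apply at which
 * position of the matched glyph sequence. */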
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};


template <typename context_t>
static inline void recurse_lookups (context_t *c,
				    unsigned int lookupCount,
				    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  for (unsigned int i = 0; i < lookupCount; i++)
    c->recurse (lookupRecord[i].lookupListIndex);
}

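/* apply_lookup() walks the glyph sequence matched by match_input() and recurses
 * into the nested lookups listed in lookupRecord[], advancing the buffer as it
 * goes; see the in-line notes about its current limitations. */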
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 const USHORT input[], /* Array of input values--start with second glyph */
				 match_func_t match_func,
				 const void *match_data,
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  TRACE_APPLY (NULL);

  unsigned int end = c->buffer->len;
  if (unlikely (count == 0 || c->buffer->idx + count > end))
    return TRACE_RETURN (false);

  /* TODO We don't support lookupRecord arrays that are not increasing:
   *      Should be easy for in_place ones at least. */

  /* Note: If sublookup is reverse, it will underflow after the first loop
   * and we jump out of it.  Not entirely disastrous.  So we don't check
   * for reverse lookup here.
   */

  hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);
  uint8_t syllable = c->buffer->cur().syllable();

  unsigned int i = 0;
  if (lookupCount && 0 == lookupRecord->sequenceIndex)
  {
    unsigned int old_pos = c->buffer->idx;

    /* Apply a lookup */
    bool done = c->recurse (lookupRecord->lookupListIndex);

    lookupRecord++;
    lookupCount--;
    /* Err, this is wrong if the lookup jumped over some glyphs */
    i += c->buffer->idx - old_pos;

    if (!done)
      goto not_applied;
    else
    {
      /* Reinitialize iterator. */
      hb_apply_context_t::skipping_forward_iterator_t tmp (c, c->buffer->idx - 1, count - i);
      tmp.set_syllable (syllable);
      skippy_iter = tmp;
    }
  }
  else
  {
  not_applied:
    /* No lookup applied for this index */
    c->buffer->next_glyph ();
    i++;
  }
  while (i < count)
  {
    if (!skippy_iter.next ()) return TRACE_RETURN (true);
    while (c->buffer->idx < skippy_iter.idx)
      c->buffer->next_glyph ();

    if (lookupCount && i == lookupRecord->sequenceIndex)
    {
      unsigned int old_pos = c->buffer->idx;

      /* Apply a lookup */
      bool done = c->recurse (lookupRecord->lookupListIndex);

      lookupRecord++;
      lookupCount--;
      /* Err, this is wrong if the lookup jumped over some glyphs */
      i += c->buffer->idx - old_pos;

      if (!done)
	goto not_applied2;
      else
      {
	/* Reinitialize iterator. */
	hb_apply_context_t::skipping_forward_iterator_t tmp (c, c->buffer->idx - 1, count - i);
	tmp.set_syllable (syllable);
	skippy_iter = tmp;
      }
    }
    else
    {
    not_applied2:
      /* No lookup applied for this index */
      c->buffer->next_glyph ();
      i++;
    }
  }

  return TRACE_RETURN (true);
}



/* Contextual lookups */

struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data;
};

struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};

struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};

static inline void context_closure_lookup (hb_closure_context_t *c,
					   unsigned int inputCount, /* Including the first glyph (not matched) */
					   const USHORT input[], /* Array of input values--start with second glyph */
					   unsigned int lookupCount,
					   const LookupRecord lookupRecord[],
					   ContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
			inputCount ? inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data))
    recurse_lookups (c,
		     lookupCount, lookupRecord);
}

static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
						   unsigned int inputCount, /* Including the first glyph (not matched) */
						   const USHORT input[], /* Array of input values--start with second glyph */
						   unsigned int lookupCount,
						   const LookupRecord lookupRecord[],
						   ContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}

static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookupCount HB_UNUSED,
					       const LookupRecord lookupRecord[] HB_UNUSED,
					       ContextApplyLookupContext &lookup_context)
{
  return would_match_input (c,
			    inputCount, input,
			    lookup_context.funcs.match, lookup_context.match_data);
}
static inline bool context_apply_lookup (hb_apply_context_t *c,
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ContextApplyLookupContext &lookup_context)
{
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data)
      && apply_lookup (c,
		       inputCount, input,
		       lookup_context.funcs.match, lookup_context.match_data,
		       lookupCount, lookupRecord);
}

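/* A single context rule: an input sequence (second glyph onwards) followed by
 * the LookupRecords to apply when it matches. */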
struct Rule
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
			    inputCount, input,
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
				   inputCount, input,
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return TRACE_RETURN (context_would_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
    return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (input,
			   input[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }

  protected:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	input[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};

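/* All Rules that start with the same first glyph (or class), tried in order;
 * the first rule that applies wins. */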
struct RuleSet
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).would_apply (c, lookup_context))
	return TRACE_RETURN (true);
    }
    return TRACE_RETURN (false);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
	return TRACE_RETURN (true);
    }
    return TRACE_RETURN (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


struct ContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED))
      return TRACE_RETURN (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

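/* Format 2: the input sequence is matched by glyph class (through a ClassDef)
 * rather than by individual glyph ids. */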
struct ContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};

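/* Format 3: a single rule in which each position of the input sequence is
 * described by its own Coverage table. */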
struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage[0]).intersects (c->glyphs))
      return;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    context_closure_lookup (c,
			    glyphCount, (const USHORT *) (coverage + 1),
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
				   glyphCount, (const USHORT *) (coverage + 1),
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    unsigned int count = glyphCount;
    if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
    LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
    return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverage[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};

struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    case 3: return TRACE_RETURN (c->dispatch (u.format3));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

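/* Same idea as the contextual lookups above, but each rule additionally matches
 * a backtrack sequence before and a lookahead sequence after the input. */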
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};

struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};

struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};

static inline void chain_context_closure_lookup (hb_closure_context_t *c,
						 unsigned int backtrackCount,
						 const USHORT backtrack[],
						 unsigned int inputCount, /* Including the first glyph (not matched) */
						 const USHORT input[], /* Array of input values--start with second glyph */
						 unsigned int lookaheadCount,
						 const USHORT lookahead[],
						 unsigned int lookupCount,
						 const LookupRecord lookupRecord[],
						 ChainContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
			backtrackCount, backtrack,
			lookup_context.funcs.intersects, lookup_context.intersects_data[0])
   && intersects_array (c,
			inputCount ? inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data[1])
   && intersects_array (c,
			lookaheadCount, lookahead,
			lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
    recurse_lookups (c,
		     lookupCount, lookupRecord);
}

static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
							 unsigned int backtrackCount,
							 const USHORT backtrack[],
							 unsigned int inputCount, /* Including the first glyph (not matched) */
							 const USHORT input[], /* Array of input values--start with second glyph */
							 unsigned int lookaheadCount,
							 const USHORT lookahead[],
							 unsigned int lookupCount,
							 const LookupRecord lookupRecord[],
							 ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
		 backtrackCount, backtrack,
		 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
		 lookaheadCount, lookahead,
		 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}

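/* With zero_context set, only rules that have no backtrack and no lookahead
 * are considered to "would apply". */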
static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
						     unsigned int backtrackCount,
						     const USHORT backtrack[] HB_UNUSED,
						     unsigned int inputCount, /* Including the first glyph (not matched) */
						     const USHORT input[], /* Array of input values--start with second glyph */
						     unsigned int lookaheadCount,
						     const USHORT lookahead[] HB_UNUSED,
						     unsigned int lookupCount HB_UNUSED,
						     const LookupRecord lookupRecord[] HB_UNUSED,
						     ChainContextApplyLookupContext &lookup_context)
{
  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
      && would_match_input (c,
			    inputCount, input,
			    lookup_context.funcs.match, lookup_context.match_data[1]);
}

static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
					       unsigned int backtrackCount,
					       const USHORT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const USHORT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       ChainContextApplyLookupContext &lookup_context)
{
  unsigned int lookahead_offset = 0;
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &lookahead_offset)
      && match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  lookahead_offset)
      && apply_lookup (c,
		       inputCount, input,
		       lookup_context.funcs.match, lookup_context.match_data[1],
		       lookupCount, lookupRecord);
}

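/* ChainRule stores four variable-length arrays back to back (backtrack, input,
 * lookahead, lookup records); only the first has a compile-time offset, the
 * rest are located at run time with StructAfter<>. */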
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.array,
                                  input.len, input.array,
                                  lookahead.len, lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, backtrack.array,
                                         input.len, input.array,
                                         lookahead.len, lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (chain_context_would_apply_lookup (c,
                                                           backtrack.len, backtrack.array,
                                                           input.len, input.array,
                                                           lookahead.len, lookahead.array,
                                                           lookup.len, lookup.array,
                                                           lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (chain_context_apply_lookup (c,
                                                     backtrack.len, backtrack.array,
                                                     input.len, input.array,
                                                     lookahead.len, lookahead.array,
                                                     lookup.len, lookup.array,
                                                     lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
    HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return TRACE_RETURN (false);
    ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  protected:
  ArrayOf<USHORT>
                backtrack;              /* Array of backtracking values
                                         * (to be matched before the input
                                         * sequence) */
  HeadlessArrayOf<USHORT>
                inputX;                 /* Array of input values (start with
                                         * second glyph) */
  ArrayOf<USHORT>
                lookaheadX;             /* Array of lookahead values (to be
                                         * matched after the input sequence) */
  ArrayOf<LookupRecord>
                lookupX;                /* Array of LookupRecords (in
                                         * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

struct ChainRuleSet
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).would_apply (c, lookup_context))
        return TRACE_RETURN (true);

    return TRACE_RETURN (false);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
        return TRACE_RETURN (true);

    return TRACE_RETURN (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<ChainRule>
                rule;                   /* Array of ChainRule tables
                                         * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

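/* Format 1: chaining context rules keyed by individual glyph IDs.  The
 * coverage table maps the first input glyph to an index into ruleSet, and
 * the selected ChainRuleSet's rules are tried in order until one applies. */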
struct ChainContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const Coverage &cov = (this+coverage);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of table */
  OffsetArrayOf<ChainRuleSet>
                ruleSet;                /* Array of ChainRuleSet tables
                                         * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

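/* Format 2: chaining context rules keyed by glyph classes.  Three separate
 * ClassDef tables classify the backtrack, input and lookahead glyphs;
 * ruleSet is indexed by the class of the first input glyph, and the class
 * values stored in each rule are resolved against the corresponding
 * ClassDef passed through lookup_context. */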
struct ChainContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
                         inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
                         ruleSet.sanitize (c, this));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of table */
  OffsetTo<ClassDef>
                backtrackClassDef;      /* Offset to glyph ClassDef table
                                         * containing backtrack sequence
                                         * data--from beginning of table */
  OffsetTo<ClassDef>
                inputClassDef;          /* Offset to glyph ClassDef
                                         * table containing input sequence
                                         * data--from beginning of table */
  OffsetTo<ClassDef>
                lookaheadClassDef;      /* Offset to glyph ClassDef table
                                         * containing lookahead sequence
                                         * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
                ruleSet;                /* Array of ChainRuleSet tables
                                         * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

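/* Format 3: a single chaining rule with one coverage table per glyph
 * position.  The "values" handed to the shared helpers are the 16-bit
 * coverage offsets themselves (hence the USHORT casts), resolved relative
 * to this subtable via the {this, this, this} match data; the first input
 * coverage is skipped (input.array + 1) because it doubles as the
 * subtable's own coverage (see get_coverage). */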
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    chain_context_closure_lookup (c,
                                  backtrack.len, (const USHORT *) backtrack.array,
                                  input.len, (const USHORT *) input.array + 1,
                                  lookahead.len, (const USHORT *) lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, (const USHORT *) backtrack.array,
                                         input.len, (const USHORT *) input.array + 1,
                                         lookahead.len, (const USHORT *) lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return TRACE_RETURN (chain_context_would_apply_lookup (c,
                                                           backtrack.len, (const USHORT *) backtrack.array,
                                                           input.len, (const USHORT *) input.array + 1,
                                                           lookahead.len, (const USHORT *) lookahead.array,
                                                           lookup.len, lookup.array, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    return this+input[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return TRACE_RETURN (chain_context_apply_lookup (c,
                                                     backtrack.len, (const USHORT *) backtrack.array,
                                                     input.len, (const USHORT *) input.array + 1,
                                                     lookahead.len, (const USHORT *) lookahead.array,
                                                     lookup.len, lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
    ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
                backtrack;              /* Array of coverage tables
                                         * in backtracking sequence, in glyph
                                         * sequence order */
  OffsetArrayOf<Coverage>
                inputX;                 /* Array of coverage
                                         * tables in input sequence, in glyph
                                         * sequence order */
  OffsetArrayOf<Coverage>
                lookaheadX;             /* Array of coverage tables
                                         * in lookahead sequence, in glyph
                                         * sequence order */
  ArrayOf<LookupRecord>
                lookupX;                /* Array of LookupRecords (in
                                         * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};

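/* ChainContext is the format-dispatching wrapper used for chaining context
 * lookups (GSUB lookup type 6, GPOS lookup type 8).  An illustrative use,
 * with caller-side names that are hypothetical and not defined in this
 * file:
 *
 *   hb_closure_context_t c (face, glyphs);      // face/glyphs supplied by caller
 *   c.set_recurse_func (closure_recurse_func);  // hypothetical callback
 *   chain_context.dispatch (&c);                // routes to format1/2/3
 */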
struct ChainContext
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    case 3: return TRACE_RETURN (c->dispatch (u.format3));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  ChainContextFormat1   format1;
  ChainContextFormat2   format2;
  ChainContextFormat3   format3;
  } u;
};


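/* Extension subtables (GSUB lookup type 7, GPOS lookup type 9) let a lookup
 * reference subtables that lie beyond the reach of ordinary 16-bit offsets:
 * ExtensionFormat1 stores the wrapped subtable's lookup type plus a 32-bit
 * offset to it, and Extension<T> below resolves that offset and forwards
 * dispatch to the wrapped subtable. */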
struct ExtensionFormat1
{
  inline unsigned int get_type (void) const { return extensionLookupType; }
  inline unsigned int get_offset (void) const { return extensionOffset; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT        format;                 /* Format identifier. Set to 1. */
  USHORT        extensionLookupType;    /* Lookup type of subtable referenced
                                         * by ExtensionOffset (i.e. the
                                         * extension subtable). */
  ULONG         extensionOffset;        /* Offset to the extension subtable,
                                         * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

template <typename T>
struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  inline unsigned int get_offset (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_offset ();
    default:return 0;
    }
  }

  template <typename X>
  inline const X& get_subtable (void) const
  {
    unsigned int offset = get_offset ();
    if (unlikely (!offset)) return Null(typename T::LookupSubTable);
    return StructAtOffset<typename T::LookupSubTable> (this, offset);
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    return get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ());
  }

  inline bool sanitize_self (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!sanitize_self (c)) return TRACE_RETURN (false);
    unsigned int offset = get_offset ();
    if (unlikely (!offset)) return TRACE_RETURN (true);
    return TRACE_RETURN (StructAtOffset<typename T::LookupSubTable> (this, offset).sanitize (c, get_type ()));
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  ExtensionFormat1      format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

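/* GSUBGPOS is the header shared by the GSUB and GPOS tables: a version
 * number followed by offsets to the ScriptList, FeatureList and LookupList.
 * A rough caller-side sketch (the surrounding variables are illustrative,
 * not defined in this file):
 *
 *   const OT::GSUBGPOS &g = *gsub;   // a sanitized, mapped GSUB or GPOS table
 *   unsigned int script_index;
 *   if (g.find_script_index (HB_TAG ('l','a','t','n'), &script_index))
 *   {
 *     const OT::Script &script = g.get_script (script_index);
 *     ...  // walk language systems and feature indices from here
 *   }
 */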
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
                                       unsigned int *script_count /* IN/OUT */,
                                       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline const Tag& get_feature_tag (unsigned int i) const
  { return (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
                                        unsigned int *feature_count /* IN/OUT */,
                                        hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
                         scriptList.sanitize (c, this) &&
                         featureList.sanitize (c, this) &&
                         lookupList.sanitize (c, this));
  }

  protected:
  FixedVersion  version;                /* Version of the GSUB/GPOS table--initially set
                                         * to 0x00010000 */
  OffsetTo<ScriptList>
                scriptList;             /* ScriptList table */
  OffsetTo<FeatureList>
                featureList;            /* FeatureList table */
  OffsetTo<LookupList>
                lookupList;             /* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */