1 /* 2 * Copyright 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 24 * 25 * Red Hat Author(s): Behdad Esfahbod 26 * Google Author(s): Behdad Esfahbod 27 */ 28 29 #ifndef HB_OT_LAYOUT_GPOS_TABLE_HH 30 #define HB_OT_LAYOUT_GPOS_TABLE_HH 31 32 #include "hb-ot-layout-gsubgpos-private.hh" 33 34 35 namespace OT { 36 37 38 /* buffer **position** var allocations */ 39 #define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */ 40 #define cursive_chain() var.i16[1] /* character to which this connects, may be positive or negative */ 41 42 43 /* Shared Tables: ValueRecord, Anchor Table, and MarkArray */ 44 45 typedef USHORT Value; 46 47 typedef Value ValueRecord[VAR]; 48 49 struct ValueFormat : USHORT 50 { 51 enum Flags { 52 xPlacement = 0x0001, /* Includes horizontal adjustment for placement */ 53 yPlacement = 0x0002, /* Includes vertical adjustment for placement */ 54 xAdvance = 0x0004, /* Includes horizontal adjustment for advance */ 55 yAdvance = 0x0008, /* Includes vertical adjustment for advance */ 56 xPlaDevice = 0x0010, /* Includes horizontal Device table for placement */ 57 yPlaDevice = 0x0020, /* Includes vertical Device table for placement */ 58 xAdvDevice = 0x0040, /* Includes horizontal Device table for advance */ 59 yAdvDevice = 0x0080, /* Includes vertical Device table for advance */ 60 ignored = 0x0F00, /* Was used in TrueType Open for MM fonts */ 61 reserved = 0xF000, /* For future use */ 62 63 devices = 0x00F0 /* Mask for having any Device table */ 64 }; 65 66 /* All fields are options. Only those available advance the value pointer. */ 67 #if 0 68 SHORT xPlacement; /* Horizontal adjustment for 69 * placement--in design units */ 70 SHORT yPlacement; /* Vertical adjustment for 71 * placement--in design units */ 72 SHORT xAdvance; /* Horizontal adjustment for 73 * advance--in design units (only used 74 * for horizontal writing) */ 75 SHORT yAdvance; /* Vertical adjustment for advance--in 76 * design units (only used for vertical 77 * writing) */ 78 Offset xPlaDevice; /* Offset to Device table for 79 * horizontal placement--measured from 80 * beginning of PosTable (may be NULL) */ 81 Offset yPlaDevice; /* Offset to Device table for vertical 82 * placement--measured from beginning 83 * of PosTable (may be NULL) */ 84 Offset xAdvDevice; /* Offset to Device table for 85 * horizontal advance--measured from 86 * beginning of PosTable (may be NULL) */ 87 Offset yAdvDevice; /* Offset to Device table for vertical 88 * advance--measured from beginning of 89 * PosTable (may be NULL) */ 90 #endif 91 92 inline unsigned int get_len (void) const 93 { return _hb_popcount32 ((unsigned int) *this); } 94 inline unsigned int get_size (void) const 95 { return get_len () * Value::static_size; } 96 97 void apply_value (hb_font_t *font, 98 hb_direction_t direction, 99 const void *base, 100 const Value *values, 101 hb_glyph_position_t &glyph_pos) const 102 { 103 unsigned int x_ppem, y_ppem; 104 unsigned int format = *this; 105 hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction); 106 107 if (!format) return; 108 109 if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++)); 110 if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++)); 111 if (format & xAdvance) { 112 if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values++)); else values++; 113 } 114 /* y_advance values grow downward but font-space grows upward, hence negation */ 115 if (format & yAdvance) { 116 if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values++)); else values++; 117 } 118 119 if (!has_device ()) return; 120 121 x_ppem = font->x_ppem; 122 y_ppem = font->y_ppem; 123 124 if (!x_ppem && !y_ppem) return; 125 126 /* pixel -> fractional pixel */ 127 if (format & xPlaDevice) { 128 if (x_ppem) glyph_pos.x_offset += (base + get_device (values++)).get_x_delta (font); else values++; 129 } 130 if (format & yPlaDevice) { 131 if (y_ppem) glyph_pos.y_offset += (base + get_device (values++)).get_y_delta (font); else values++; 132 } 133 if (format & xAdvDevice) { 134 if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values++)).get_x_delta (font); else values++; 135 } 136 if (format & yAdvDevice) { 137 /* y_advance values grow downward but font-space grows upward, hence negation */ 138 if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values++)).get_y_delta (font); else values++; 139 } 140 } 141 142 private: 143 inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) { 144 unsigned int format = *this; 145 146 if (format & xPlacement) values++; 147 if (format & yPlacement) values++; 148 if (format & xAdvance) values++; 149 if (format & yAdvance) values++; 150 151 if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false; 152 if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false; 153 if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false; 154 if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false; 155 156 return true; 157 } 158 159 static inline OffsetTo<Device>& get_device (Value* value) 160 { return *CastP<OffsetTo<Device> > (value); } 161 static inline const OffsetTo<Device>& get_device (const Value* value) 162 { return *CastP<OffsetTo<Device> > (value); } 163 164 static inline const SHORT& get_short (const Value* value) 165 { return *CastP<SHORT> (value); } 166 167 public: 168 169 inline bool has_device (void) const { 170 unsigned int format = *this; 171 return (format & devices) != 0; 172 } 173 174 inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) { 175 TRACE_SANITIZE (this); 176 return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values))); 177 } 178 179 inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) { 180 TRACE_SANITIZE (this); 181 unsigned int len = get_len (); 182 183 if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false); 184 185 if (!has_device ()) return TRACE_RETURN (true); 186 187 for (unsigned int i = 0; i < count; i++) { 188 if (!sanitize_value_devices (c, base, values)) 189 return TRACE_RETURN (false); 190 values += len; 191 } 192 193 return TRACE_RETURN (true); 194 } 195 196 /* Just sanitize referenced Device tables. Doesn't check the values themselves. */ 197 inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) { 198 TRACE_SANITIZE (this); 199 200 if (!has_device ()) return TRACE_RETURN (true); 201 202 for (unsigned int i = 0; i < count; i++) { 203 if (!sanitize_value_devices (c, base, values)) 204 return TRACE_RETURN (false); 205 values += stride; 206 } 207 208 return TRACE_RETURN (true); 209 } 210 }; 211 212 213 struct AnchorFormat1 214 { 215 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED, 216 hb_position_t *x, hb_position_t *y) const 217 { 218 *x = font->em_scale_x (xCoordinate); 219 *y = font->em_scale_y (yCoordinate); 220 } 221 222 inline bool sanitize (hb_sanitize_context_t *c) { 223 TRACE_SANITIZE (this); 224 return TRACE_RETURN (c->check_struct (this)); 225 } 226 227 protected: 228 USHORT format; /* Format identifier--format = 1 */ 229 SHORT xCoordinate; /* Horizontal value--in design units */ 230 SHORT yCoordinate; /* Vertical value--in design units */ 231 public: 232 DEFINE_SIZE_STATIC (6); 233 }; 234 235 struct AnchorFormat2 236 { 237 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id, 238 hb_position_t *x, hb_position_t *y) const 239 { 240 unsigned int x_ppem = font->x_ppem; 241 unsigned int y_ppem = font->y_ppem; 242 hb_position_t cx, cy; 243 hb_bool_t ret = false; 244 245 if (x_ppem || y_ppem) 246 ret = font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy); 247 *x = x_ppem && ret ? cx : font->em_scale_x (xCoordinate); 248 *y = y_ppem && ret ? cy : font->em_scale_y (yCoordinate); 249 } 250 251 inline bool sanitize (hb_sanitize_context_t *c) { 252 TRACE_SANITIZE (this); 253 return TRACE_RETURN (c->check_struct (this)); 254 } 255 256 protected: 257 USHORT format; /* Format identifier--format = 2 */ 258 SHORT xCoordinate; /* Horizontal value--in design units */ 259 SHORT yCoordinate; /* Vertical value--in design units */ 260 USHORT anchorPoint; /* Index to glyph contour point */ 261 public: 262 DEFINE_SIZE_STATIC (8); 263 }; 264 265 struct AnchorFormat3 266 { 267 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED, 268 hb_position_t *x, hb_position_t *y) const 269 { 270 *x = font->em_scale_x (xCoordinate); 271 *y = font->em_scale_y (yCoordinate); 272 273 if (font->x_ppem) 274 *x += (this+xDeviceTable).get_x_delta (font); 275 if (font->y_ppem) 276 *y += (this+yDeviceTable).get_x_delta (font); 277 } 278 279 inline bool sanitize (hb_sanitize_context_t *c) { 280 TRACE_SANITIZE (this); 281 return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this)); 282 } 283 284 protected: 285 USHORT format; /* Format identifier--format = 3 */ 286 SHORT xCoordinate; /* Horizontal value--in design units */ 287 SHORT yCoordinate; /* Vertical value--in design units */ 288 OffsetTo<Device> 289 xDeviceTable; /* Offset to Device table for X 290 * coordinate-- from beginning of 291 * Anchor table (may be NULL) */ 292 OffsetTo<Device> 293 yDeviceTable; /* Offset to Device table for Y 294 * coordinate-- from beginning of 295 * Anchor table (may be NULL) */ 296 public: 297 DEFINE_SIZE_STATIC (10); 298 }; 299 300 struct Anchor 301 { 302 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id, 303 hb_position_t *x, hb_position_t *y) const 304 { 305 *x = *y = 0; 306 switch (u.format) { 307 case 1: u.format1.get_anchor (font, glyph_id, x, y); return; 308 case 2: u.format2.get_anchor (font, glyph_id, x, y); return; 309 case 3: u.format3.get_anchor (font, glyph_id, x, y); return; 310 default: return; 311 } 312 } 313 314 inline bool sanitize (hb_sanitize_context_t *c) { 315 TRACE_SANITIZE (this); 316 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 317 switch (u.format) { 318 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 319 case 2: return TRACE_RETURN (u.format2.sanitize (c)); 320 case 3: return TRACE_RETURN (u.format3.sanitize (c)); 321 default:return TRACE_RETURN (true); 322 } 323 } 324 325 protected: 326 union { 327 USHORT format; /* Format identifier */ 328 AnchorFormat1 format1; 329 AnchorFormat2 format2; 330 AnchorFormat3 format3; 331 } u; 332 public: 333 DEFINE_SIZE_UNION (2, format); 334 }; 335 336 337 struct AnchorMatrix 338 { 339 inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const { 340 *found = false; 341 if (unlikely (row >= rows || col >= cols)) return Null(Anchor); 342 *found = !matrix[row * cols + col].is_null (); 343 return this+matrix[row * cols + col]; 344 } 345 346 inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) { 347 TRACE_SANITIZE (this); 348 if (!c->check_struct (this)) return TRACE_RETURN (false); 349 if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false); 350 unsigned int count = rows * cols; 351 if (!c->check_array (matrix, matrix[0].static_size, count)) return TRACE_RETURN (false); 352 for (unsigned int i = 0; i < count; i++) 353 if (!matrix[i].sanitize (c, this)) return TRACE_RETURN (false); 354 return TRACE_RETURN (true); 355 } 356 357 USHORT rows; /* Number of rows */ 358 protected: 359 OffsetTo<Anchor> 360 matrix[VAR]; /* Matrix of offsets to Anchor tables-- 361 * from beginning of AnchorMatrix table */ 362 public: 363 DEFINE_SIZE_ARRAY (2, matrix); 364 }; 365 366 367 struct MarkRecord 368 { 369 friend struct MarkArray; 370 371 inline bool sanitize (hb_sanitize_context_t *c, void *base) { 372 TRACE_SANITIZE (this); 373 return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base)); 374 } 375 376 protected: 377 USHORT klass; /* Class defined for this mark */ 378 OffsetTo<Anchor> 379 markAnchor; /* Offset to Anchor table--from 380 * beginning of MarkArray table */ 381 public: 382 DEFINE_SIZE_STATIC (4); 383 }; 384 385 struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */ 386 { 387 inline bool apply (hb_apply_context_t *c, 388 unsigned int mark_index, unsigned int glyph_index, 389 const AnchorMatrix &anchors, unsigned int class_count, 390 unsigned int glyph_pos) const 391 { 392 TRACE_APPLY (this); 393 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index); 394 unsigned int mark_class = record.klass; 395 396 const Anchor& mark_anchor = this + record.markAnchor; 397 bool found; 398 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found); 399 /* If this subtable doesn't have an anchor for this base and this class, 400 * return false such that the subsequent subtables have a chance at it. */ 401 if (unlikely (!found)) return TRACE_RETURN (false); 402 403 hb_position_t mark_x, mark_y, base_x, base_y; 404 405 mark_anchor.get_anchor (c->font, c->buffer->cur().codepoint, &mark_x, &mark_y); 406 glyph_anchor.get_anchor (c->font, c->buffer->info[glyph_pos].codepoint, &base_x, &base_y); 407 408 hb_glyph_position_t &o = c->buffer->cur_pos(); 409 o.x_offset = base_x - mark_x; 410 o.y_offset = base_y - mark_y; 411 o.attach_lookback() = c->buffer->idx - glyph_pos; 412 413 c->buffer->idx++; 414 return TRACE_RETURN (true); 415 } 416 417 inline bool sanitize (hb_sanitize_context_t *c) { 418 TRACE_SANITIZE (this); 419 return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this)); 420 } 421 }; 422 423 424 /* Lookups */ 425 426 struct SinglePosFormat1 427 { 428 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 429 { 430 TRACE_COLLECT_GLYPHS (this); 431 (this+coverage).add_coverage (c->input); 432 } 433 434 inline const Coverage &get_coverage (void) const 435 { 436 return this+coverage; 437 } 438 439 inline bool apply (hb_apply_context_t *c) const 440 { 441 TRACE_APPLY (this); 442 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 443 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 444 445 valueFormat.apply_value (c->font, c->direction, this, 446 values, c->buffer->cur_pos()); 447 448 c->buffer->idx++; 449 return TRACE_RETURN (true); 450 } 451 452 inline bool sanitize (hb_sanitize_context_t *c) { 453 TRACE_SANITIZE (this); 454 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values)); 455 } 456 457 protected: 458 USHORT format; /* Format identifier--format = 1 */ 459 OffsetTo<Coverage> 460 coverage; /* Offset to Coverage table--from 461 * beginning of subtable */ 462 ValueFormat valueFormat; /* Defines the types of data in the 463 * ValueRecord */ 464 ValueRecord values; /* Defines positioning 465 * value(s)--applied to all glyphs in 466 * the Coverage table */ 467 public: 468 DEFINE_SIZE_ARRAY (6, values); 469 }; 470 471 struct SinglePosFormat2 472 { 473 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 474 { 475 TRACE_COLLECT_GLYPHS (this); 476 (this+coverage).add_coverage (c->input); 477 } 478 479 inline const Coverage &get_coverage (void) const 480 { 481 return this+coverage; 482 } 483 484 inline bool apply (hb_apply_context_t *c) const 485 { 486 TRACE_APPLY (this); 487 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 488 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 489 490 if (likely (index >= valueCount)) return TRACE_RETURN (false); 491 492 valueFormat.apply_value (c->font, c->direction, this, 493 &values[index * valueFormat.get_len ()], 494 c->buffer->cur_pos()); 495 496 c->buffer->idx++; 497 return TRACE_RETURN (true); 498 } 499 500 inline bool sanitize (hb_sanitize_context_t *c) { 501 TRACE_SANITIZE (this); 502 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount)); 503 } 504 505 protected: 506 USHORT format; /* Format identifier--format = 2 */ 507 OffsetTo<Coverage> 508 coverage; /* Offset to Coverage table--from 509 * beginning of subtable */ 510 ValueFormat valueFormat; /* Defines the types of data in the 511 * ValueRecord */ 512 USHORT valueCount; /* Number of ValueRecords */ 513 ValueRecord values; /* Array of ValueRecords--positioning 514 * values applied to glyphs */ 515 public: 516 DEFINE_SIZE_ARRAY (8, values); 517 }; 518 519 struct SinglePos 520 { 521 template <typename context_t> 522 inline typename context_t::return_t dispatch (context_t *c) const 523 { 524 TRACE_DISPATCH (this); 525 switch (u.format) { 526 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 527 case 2: return TRACE_RETURN (c->dispatch (u.format2)); 528 default:return TRACE_RETURN (c->default_return_value ()); 529 } 530 } 531 532 inline bool sanitize (hb_sanitize_context_t *c) { 533 TRACE_SANITIZE (this); 534 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 535 switch (u.format) { 536 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 537 case 2: return TRACE_RETURN (u.format2.sanitize (c)); 538 default:return TRACE_RETURN (true); 539 } 540 } 541 542 protected: 543 union { 544 USHORT format; /* Format identifier */ 545 SinglePosFormat1 format1; 546 SinglePosFormat2 format2; 547 } u; 548 }; 549 550 551 struct PairValueRecord 552 { 553 friend struct PairSet; 554 555 protected: 556 GlyphID secondGlyph; /* GlyphID of second glyph in the 557 * pair--first glyph is listed in the 558 * Coverage table */ 559 ValueRecord values; /* Positioning data for the first glyph 560 * followed by for second glyph */ 561 public: 562 DEFINE_SIZE_ARRAY (2, values); 563 }; 564 565 struct PairSet 566 { 567 friend struct PairPosFormat1; 568 569 inline void collect_glyphs (hb_collect_glyphs_context_t *c, 570 const ValueFormat *valueFormats) const 571 { 572 TRACE_COLLECT_GLYPHS (this); 573 unsigned int len1 = valueFormats[0].get_len (); 574 unsigned int len2 = valueFormats[1].get_len (); 575 unsigned int record_size = USHORT::static_size * (1 + len1 + len2); 576 577 const PairValueRecord *record = CastP<PairValueRecord> (array); 578 unsigned int count = len; 579 for (unsigned int i = 0; i < count; i++) 580 { 581 c->input->add (record->secondGlyph); 582 record = &StructAtOffset<PairValueRecord> (record, record_size); 583 } 584 } 585 586 inline bool apply (hb_apply_context_t *c, 587 const ValueFormat *valueFormats, 588 unsigned int pos) const 589 { 590 TRACE_APPLY (this); 591 unsigned int len1 = valueFormats[0].get_len (); 592 unsigned int len2 = valueFormats[1].get_len (); 593 unsigned int record_size = USHORT::static_size * (1 + len1 + len2); 594 595 const PairValueRecord *record = CastP<PairValueRecord> (array); 596 unsigned int count = len; 597 for (unsigned int i = 0; i < count; i++) 598 { 599 /* TODO bsearch */ 600 if (c->buffer->info[pos].codepoint == record->secondGlyph) 601 { 602 valueFormats[0].apply_value (c->font, c->direction, this, 603 &record->values[0], c->buffer->cur_pos()); 604 valueFormats[1].apply_value (c->font, c->direction, this, 605 &record->values[len1], c->buffer->pos[pos]); 606 if (len2) 607 pos++; 608 c->buffer->idx = pos; 609 return TRACE_RETURN (true); 610 } 611 record = &StructAtOffset<PairValueRecord> (record, record_size); 612 } 613 614 return TRACE_RETURN (false); 615 } 616 617 struct sanitize_closure_t { 618 void *base; 619 ValueFormat *valueFormats; 620 unsigned int len1; /* valueFormats[0].get_len() */ 621 unsigned int stride; /* 1 + len1 + len2 */ 622 }; 623 624 inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) { 625 TRACE_SANITIZE (this); 626 if (!(c->check_struct (this) 627 && c->check_array (array, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false); 628 629 unsigned int count = len; 630 PairValueRecord *record = CastP<PairValueRecord> (array); 631 return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride) 632 && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride)); 633 } 634 635 protected: 636 USHORT len; /* Number of PairValueRecords */ 637 USHORT array[VAR]; /* Array of PairValueRecords--ordered 638 * by GlyphID of the second glyph */ 639 public: 640 DEFINE_SIZE_ARRAY (2, array); 641 }; 642 643 struct PairPosFormat1 644 { 645 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 646 { 647 TRACE_COLLECT_GLYPHS (this); 648 (this+coverage).add_coverage (c->input); 649 unsigned int count = pairSet.len; 650 for (unsigned int i = 0; i < count; i++) 651 (this+pairSet[i]).collect_glyphs (c, &valueFormat1); 652 } 653 654 inline const Coverage &get_coverage (void) const 655 { 656 return this+coverage; 657 } 658 659 inline bool apply (hb_apply_context_t *c) const 660 { 661 TRACE_APPLY (this); 662 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); 663 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); 664 665 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 666 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 667 668 if (!skippy_iter.next ()) return TRACE_RETURN (false); 669 670 return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx)); 671 } 672 673 inline bool sanitize (hb_sanitize_context_t *c) { 674 TRACE_SANITIZE (this); 675 676 unsigned int len1 = valueFormat1.get_len (); 677 unsigned int len2 = valueFormat2.get_len (); 678 PairSet::sanitize_closure_t closure = { 679 this, 680 &valueFormat1, 681 len1, 682 1 + len1 + len2 683 }; 684 685 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure)); 686 } 687 688 protected: 689 USHORT format; /* Format identifier--format = 1 */ 690 OffsetTo<Coverage> 691 coverage; /* Offset to Coverage table--from 692 * beginning of subtable */ 693 ValueFormat valueFormat1; /* Defines the types of data in 694 * ValueRecord1--for the first glyph 695 * in the pair--may be zero (0) */ 696 ValueFormat valueFormat2; /* Defines the types of data in 697 * ValueRecord2--for the second glyph 698 * in the pair--may be zero (0) */ 699 OffsetArrayOf<PairSet> 700 pairSet; /* Array of PairSet tables 701 * ordered by Coverage Index */ 702 public: 703 DEFINE_SIZE_ARRAY (10, pairSet); 704 }; 705 706 struct PairPosFormat2 707 { 708 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 709 { 710 TRACE_COLLECT_GLYPHS (this); 711 /* (this+coverage).add_coverage (c->input); // Don't need this. */ 712 713 unsigned int count1 = class1Count; 714 const ClassDef &klass1 = this+classDef1; 715 for (unsigned int i = 0; i < count1; i++) 716 klass1.add_class (c->input, i); 717 718 unsigned int count2 = class2Count; 719 const ClassDef &klass2 = this+classDef2; 720 for (unsigned int i = 0; i < count2; i++) 721 klass2.add_class (c->input, i); 722 } 723 724 inline const Coverage &get_coverage (void) const 725 { 726 return this+coverage; 727 } 728 729 inline bool apply (hb_apply_context_t *c) const 730 { 731 TRACE_APPLY (this); 732 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); 733 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); 734 735 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 736 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); 737 738 if (!skippy_iter.next ()) return TRACE_RETURN (false); 739 740 unsigned int len1 = valueFormat1.get_len (); 741 unsigned int len2 = valueFormat2.get_len (); 742 unsigned int record_len = len1 + len2; 743 744 unsigned int klass1 = (this+classDef1).get_class (c->buffer->cur().codepoint); 745 unsigned int klass2 = (this+classDef2).get_class (c->buffer->info[skippy_iter.idx].codepoint); 746 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false); 747 748 const Value *v = &values[record_len * (klass1 * class2Count + klass2)]; 749 valueFormat1.apply_value (c->font, c->direction, this, 750 v, c->buffer->cur_pos()); 751 valueFormat2.apply_value (c->font, c->direction, this, 752 v + len1, c->buffer->pos[skippy_iter.idx]); 753 754 c->buffer->idx = skippy_iter.idx; 755 if (len2) 756 c->buffer->idx++; 757 758 return TRACE_RETURN (true); 759 } 760 761 inline bool sanitize (hb_sanitize_context_t *c) { 762 TRACE_SANITIZE (this); 763 if (!(c->check_struct (this) 764 && coverage.sanitize (c, this) 765 && classDef1.sanitize (c, this) 766 && classDef2.sanitize (c, this))) return TRACE_RETURN (false); 767 768 unsigned int len1 = valueFormat1.get_len (); 769 unsigned int len2 = valueFormat2.get_len (); 770 unsigned int stride = len1 + len2; 771 unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size (); 772 unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count; 773 return TRACE_RETURN (c->check_array (values, record_size, count) && 774 valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) && 775 valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride)); 776 } 777 778 protected: 779 USHORT format; /* Format identifier--format = 2 */ 780 OffsetTo<Coverage> 781 coverage; /* Offset to Coverage table--from 782 * beginning of subtable */ 783 ValueFormat valueFormat1; /* ValueRecord definition--for the 784 * first glyph of the pair--may be zero 785 * (0) */ 786 ValueFormat valueFormat2; /* ValueRecord definition--for the 787 * second glyph of the pair--may be 788 * zero (0) */ 789 OffsetTo<ClassDef> 790 classDef1; /* Offset to ClassDef table--from 791 * beginning of PairPos subtable--for 792 * the first glyph of the pair */ 793 OffsetTo<ClassDef> 794 classDef2; /* Offset to ClassDef table--from 795 * beginning of PairPos subtable--for 796 * the second glyph of the pair */ 797 USHORT class1Count; /* Number of classes in ClassDef1 798 * table--includes Class0 */ 799 USHORT class2Count; /* Number of classes in ClassDef2 800 * table--includes Class0 */ 801 ValueRecord values; /* Matrix of value pairs: 802 * class1-major, class2-minor, 803 * Each entry has value1 and value2 */ 804 public: 805 DEFINE_SIZE_ARRAY (16, values); 806 }; 807 808 struct PairPos 809 { 810 template <typename context_t> 811 inline typename context_t::return_t dispatch (context_t *c) const 812 { 813 TRACE_DISPATCH (this); 814 switch (u.format) { 815 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 816 case 2: return TRACE_RETURN (c->dispatch (u.format2)); 817 default:return TRACE_RETURN (c->default_return_value ()); 818 } 819 } 820 821 inline bool sanitize (hb_sanitize_context_t *c) { 822 TRACE_SANITIZE (this); 823 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 824 switch (u.format) { 825 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 826 case 2: return TRACE_RETURN (u.format2.sanitize (c)); 827 default:return TRACE_RETURN (true); 828 } 829 } 830 831 protected: 832 union { 833 USHORT format; /* Format identifier */ 834 PairPosFormat1 format1; 835 PairPosFormat2 format2; 836 } u; 837 }; 838 839 840 struct EntryExitRecord 841 { 842 friend struct CursivePosFormat1; 843 844 inline bool sanitize (hb_sanitize_context_t *c, void *base) { 845 TRACE_SANITIZE (this); 846 return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base)); 847 } 848 849 protected: 850 OffsetTo<Anchor> 851 entryAnchor; /* Offset to EntryAnchor table--from 852 * beginning of CursivePos 853 * subtable--may be NULL */ 854 OffsetTo<Anchor> 855 exitAnchor; /* Offset to ExitAnchor table--from 856 * beginning of CursivePos 857 * subtable--may be NULL */ 858 public: 859 DEFINE_SIZE_STATIC (4); 860 }; 861 862 struct CursivePosFormat1 863 { 864 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 865 { 866 TRACE_COLLECT_GLYPHS (this); 867 (this+coverage).add_coverage (c->input); 868 } 869 870 inline const Coverage &get_coverage (void) const 871 { 872 return this+coverage; 873 } 874 875 inline bool apply (hb_apply_context_t *c) const 876 { 877 TRACE_APPLY (this); 878 879 /* We don't handle mark glyphs here. */ 880 if (c->buffer->cur().glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK) return TRACE_RETURN (false); 881 882 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); 883 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); 884 885 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->cur().codepoint)]; 886 if (!this_record.exitAnchor) return TRACE_RETURN (false); 887 888 if (!skippy_iter.next ()) return TRACE_RETURN (false); 889 890 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint)]; 891 if (!next_record.entryAnchor) return TRACE_RETURN (false); 892 893 unsigned int i = c->buffer->idx; 894 unsigned int j = skippy_iter.idx; 895 896 hb_position_t entry_x, entry_y, exit_x, exit_y; 897 (this+this_record.exitAnchor).get_anchor (c->font, c->buffer->info[i].codepoint, &exit_x, &exit_y); 898 (this+next_record.entryAnchor).get_anchor (c->font, c->buffer->info[j].codepoint, &entry_x, &entry_y); 899 900 hb_glyph_position_t *pos = c->buffer->pos; 901 902 hb_position_t d; 903 /* Main-direction adjustment */ 904 switch (c->direction) { 905 case HB_DIRECTION_LTR: 906 pos[i].x_advance = exit_x + pos[i].x_offset; 907 908 d = entry_x + pos[j].x_offset; 909 pos[j].x_advance -= d; 910 pos[j].x_offset -= d; 911 break; 912 case HB_DIRECTION_RTL: 913 d = exit_x + pos[i].x_offset; 914 pos[i].x_advance -= d; 915 pos[i].x_offset -= d; 916 917 pos[j].x_advance = entry_x + pos[j].x_offset; 918 break; 919 case HB_DIRECTION_TTB: 920 pos[i].y_advance = exit_y + pos[i].y_offset; 921 922 d = entry_y + pos[j].y_offset; 923 pos[j].y_advance -= d; 924 pos[j].y_offset -= d; 925 break; 926 case HB_DIRECTION_BTT: 927 d = exit_y + pos[i].y_offset; 928 pos[i].y_advance -= d; 929 pos[i].y_offset -= d; 930 931 pos[j].y_advance = entry_y; 932 break; 933 case HB_DIRECTION_INVALID: 934 default: 935 break; 936 } 937 938 /* Cross-direction adjustment */ 939 if (c->lookup_props & LookupFlag::RightToLeft) { 940 pos[i].cursive_chain() = j - i; 941 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction))) 942 pos[i].y_offset = entry_y - exit_y; 943 else 944 pos[i].x_offset = entry_x - exit_x; 945 } else { 946 pos[j].cursive_chain() = i - j; 947 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction))) 948 pos[j].y_offset = exit_y - entry_y; 949 else 950 pos[j].x_offset = exit_x - entry_x; 951 } 952 953 c->buffer->idx = j; 954 return TRACE_RETURN (true); 955 } 956 957 inline bool sanitize (hb_sanitize_context_t *c) { 958 TRACE_SANITIZE (this); 959 return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this)); 960 } 961 962 protected: 963 USHORT format; /* Format identifier--format = 1 */ 964 OffsetTo<Coverage> 965 coverage; /* Offset to Coverage table--from 966 * beginning of subtable */ 967 ArrayOf<EntryExitRecord> 968 entryExitRecord; /* Array of EntryExit records--in 969 * Coverage Index order */ 970 public: 971 DEFINE_SIZE_ARRAY (6, entryExitRecord); 972 }; 973 974 struct CursivePos 975 { 976 template <typename context_t> 977 inline typename context_t::return_t dispatch (context_t *c) const 978 { 979 TRACE_DISPATCH (this); 980 switch (u.format) { 981 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 982 default:return TRACE_RETURN (c->default_return_value ()); 983 } 984 } 985 986 inline bool sanitize (hb_sanitize_context_t *c) { 987 TRACE_SANITIZE (this); 988 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 989 switch (u.format) { 990 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 991 default:return TRACE_RETURN (true); 992 } 993 } 994 995 protected: 996 union { 997 USHORT format; /* Format identifier */ 998 CursivePosFormat1 format1; 999 } u; 1000 }; 1001 1002 1003 typedef AnchorMatrix BaseArray; /* base-major-- 1004 * in order of BaseCoverage Index--, 1005 * mark-minor-- 1006 * ordered by class--zero-based. */ 1007 1008 struct MarkBasePosFormat1 1009 { 1010 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1011 { 1012 TRACE_COLLECT_GLYPHS (this); 1013 (this+markCoverage).add_coverage (c->input); 1014 (this+baseCoverage).add_coverage (c->input); 1015 } 1016 1017 inline const Coverage &get_coverage (void) const 1018 { 1019 return this+markCoverage; 1020 } 1021 1022 inline bool apply (hb_apply_context_t *c) const 1023 { 1024 TRACE_APPLY (this); 1025 unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint); 1026 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false); 1027 1028 /* now we search backwards for a non-mark glyph */ 1029 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1); 1030 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); 1031 do { 1032 if (!skippy_iter.prev ()) return TRACE_RETURN (false); 1033 /* We only want to attach to the first of a MultipleSubst sequence. Reject others. */ 1034 if (0 == get_lig_comp (c->buffer->info[skippy_iter.idx])) break; 1035 skippy_iter.reject (); 1036 } while (1); 1037 1038 /* The following assertion is too strong, so we've disabled it. */ 1039 if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH)) {/*return TRACE_RETURN (false);*/} 1040 1041 unsigned int base_index = (this+baseCoverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint); 1042 if (base_index == NOT_COVERED) return TRACE_RETURN (false); 1043 1044 return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx)); 1045 } 1046 1047 inline bool sanitize (hb_sanitize_context_t *c) { 1048 TRACE_SANITIZE (this); 1049 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) && 1050 markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount)); 1051 } 1052 1053 protected: 1054 USHORT format; /* Format identifier--format = 1 */ 1055 OffsetTo<Coverage> 1056 markCoverage; /* Offset to MarkCoverage table--from 1057 * beginning of MarkBasePos subtable */ 1058 OffsetTo<Coverage> 1059 baseCoverage; /* Offset to BaseCoverage table--from 1060 * beginning of MarkBasePos subtable */ 1061 USHORT classCount; /* Number of classes defined for marks */ 1062 OffsetTo<MarkArray> 1063 markArray; /* Offset to MarkArray table--from 1064 * beginning of MarkBasePos subtable */ 1065 OffsetTo<BaseArray> 1066 baseArray; /* Offset to BaseArray table--from 1067 * beginning of MarkBasePos subtable */ 1068 public: 1069 DEFINE_SIZE_STATIC (12); 1070 }; 1071 1072 struct MarkBasePos 1073 { 1074 template <typename context_t> 1075 inline typename context_t::return_t dispatch (context_t *c) const 1076 { 1077 TRACE_DISPATCH (this); 1078 switch (u.format) { 1079 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 1080 default:return TRACE_RETURN (c->default_return_value ()); 1081 } 1082 } 1083 1084 inline bool sanitize (hb_sanitize_context_t *c) { 1085 TRACE_SANITIZE (this); 1086 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 1087 switch (u.format) { 1088 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 1089 default:return TRACE_RETURN (true); 1090 } 1091 } 1092 1093 protected: 1094 union { 1095 USHORT format; /* Format identifier */ 1096 MarkBasePosFormat1 format1; 1097 } u; 1098 }; 1099 1100 1101 typedef AnchorMatrix LigatureAttach; /* component-major-- 1102 * in order of writing direction--, 1103 * mark-minor-- 1104 * ordered by class--zero-based. */ 1105 1106 typedef OffsetListOf<LigatureAttach> LigatureArray; 1107 /* Array of LigatureAttach 1108 * tables ordered by 1109 * LigatureCoverage Index */ 1110 1111 struct MarkLigPosFormat1 1112 { 1113 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1114 { 1115 TRACE_COLLECT_GLYPHS (this); 1116 (this+markCoverage).add_coverage (c->input); 1117 (this+ligatureCoverage).add_coverage (c->input); 1118 } 1119 1120 inline const Coverage &get_coverage (void) const 1121 { 1122 return this+markCoverage; 1123 } 1124 1125 inline bool apply (hb_apply_context_t *c) const 1126 { 1127 TRACE_APPLY (this); 1128 unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint); 1129 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false); 1130 1131 /* now we search backwards for a non-mark glyph */ 1132 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1); 1133 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); 1134 if (!skippy_iter.prev ()) return TRACE_RETURN (false); 1135 1136 /* The following assertion is too strong, so we've disabled it. */ 1137 if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE)) {/*return TRACE_RETURN (false);*/} 1138 1139 unsigned int j = skippy_iter.idx; 1140 unsigned int lig_index = (this+ligatureCoverage).get_coverage (c->buffer->info[j].codepoint); 1141 if (lig_index == NOT_COVERED) return TRACE_RETURN (false); 1142 1143 const LigatureArray& lig_array = this+ligatureArray; 1144 const LigatureAttach& lig_attach = lig_array[lig_index]; 1145 1146 /* Find component to attach to */ 1147 unsigned int comp_count = lig_attach.rows; 1148 if (unlikely (!comp_count)) return TRACE_RETURN (false); 1149 1150 /* We must now check whether the ligature ID of the current mark glyph 1151 * is identical to the ligature ID of the found ligature. If yes, we 1152 * can directly use the component index. If not, we attach the mark 1153 * glyph to the last component of the ligature. */ 1154 unsigned int comp_index; 1155 unsigned int lig_id = get_lig_id (c->buffer->info[j]); 1156 unsigned int mark_id = get_lig_id (c->buffer->cur()); 1157 unsigned int mark_comp = get_lig_comp (c->buffer->cur()); 1158 if (lig_id && lig_id == mark_id && mark_comp > 0) 1159 comp_index = MIN (comp_count, get_lig_comp (c->buffer->cur())) - 1; 1160 else 1161 comp_index = comp_count - 1; 1162 1163 return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j)); 1164 } 1165 1166 inline bool sanitize (hb_sanitize_context_t *c) { 1167 TRACE_SANITIZE (this); 1168 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) && 1169 markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount)); 1170 } 1171 1172 protected: 1173 USHORT format; /* Format identifier--format = 1 */ 1174 OffsetTo<Coverage> 1175 markCoverage; /* Offset to Mark Coverage table--from 1176 * beginning of MarkLigPos subtable */ 1177 OffsetTo<Coverage> 1178 ligatureCoverage; /* Offset to Ligature Coverage 1179 * table--from beginning of MarkLigPos 1180 * subtable */ 1181 USHORT classCount; /* Number of defined mark classes */ 1182 OffsetTo<MarkArray> 1183 markArray; /* Offset to MarkArray table--from 1184 * beginning of MarkLigPos subtable */ 1185 OffsetTo<LigatureArray> 1186 ligatureArray; /* Offset to LigatureArray table--from 1187 * beginning of MarkLigPos subtable */ 1188 public: 1189 DEFINE_SIZE_STATIC (12); 1190 }; 1191 1192 struct MarkLigPos 1193 { 1194 template <typename context_t> 1195 inline typename context_t::return_t dispatch (context_t *c) const 1196 { 1197 TRACE_DISPATCH (this); 1198 switch (u.format) { 1199 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 1200 default:return TRACE_RETURN (c->default_return_value ()); 1201 } 1202 } 1203 1204 inline bool sanitize (hb_sanitize_context_t *c) { 1205 TRACE_SANITIZE (this); 1206 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 1207 switch (u.format) { 1208 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 1209 default:return TRACE_RETURN (true); 1210 } 1211 } 1212 1213 protected: 1214 union { 1215 USHORT format; /* Format identifier */ 1216 MarkLigPosFormat1 format1; 1217 } u; 1218 }; 1219 1220 1221 typedef AnchorMatrix Mark2Array; /* mark2-major-- 1222 * in order of Mark2Coverage Index--, 1223 * mark1-minor-- 1224 * ordered by class--zero-based. */ 1225 1226 struct MarkMarkPosFormat1 1227 { 1228 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1229 { 1230 TRACE_COLLECT_GLYPHS (this); 1231 (this+mark1Coverage).add_coverage (c->input); 1232 (this+mark2Coverage).add_coverage (c->input); 1233 } 1234 1235 inline const Coverage &get_coverage (void) const 1236 { 1237 return this+mark1Coverage; 1238 } 1239 1240 inline bool apply (hb_apply_context_t *c) const 1241 { 1242 TRACE_APPLY (this); 1243 unsigned int mark1_index = (this+mark1Coverage).get_coverage (c->buffer->cur().codepoint); 1244 if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false); 1245 1246 /* now we search backwards for a suitable mark glyph until a non-mark glyph */ 1247 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1); 1248 skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags); 1249 if (!skippy_iter.prev ()) return TRACE_RETURN (false); 1250 1251 if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) { return TRACE_RETURN (false); } 1252 1253 unsigned int j = skippy_iter.idx; 1254 1255 unsigned int id1 = get_lig_id (c->buffer->cur()); 1256 unsigned int id2 = get_lig_id (c->buffer->info[j]); 1257 unsigned int comp1 = get_lig_comp (c->buffer->cur()); 1258 unsigned int comp2 = get_lig_comp (c->buffer->info[j]); 1259 1260 if (likely (id1 == id2)) { 1261 if (id1 == 0) /* Marks belonging to the same base. */ 1262 goto good; 1263 else if (comp1 == comp2) /* Marks belonging to the same ligature component. */ 1264 goto good; 1265 } else { 1266 /* If ligature ids don't match, it may be the case that one of the marks 1267 * itself is a ligature. In which case match. */ 1268 if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2)) 1269 goto good; 1270 } 1271 1272 /* Didn't match. */ 1273 return TRACE_RETURN (false); 1274 1275 good: 1276 unsigned int mark2_index = (this+mark2Coverage).get_coverage (c->buffer->info[j].codepoint); 1277 if (mark2_index == NOT_COVERED) return TRACE_RETURN (false); 1278 1279 return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j)); 1280 } 1281 1282 inline bool sanitize (hb_sanitize_context_t *c) { 1283 TRACE_SANITIZE (this); 1284 return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) && 1285 mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this) 1286 && mark2Array.sanitize (c, this, (unsigned int) classCount)); 1287 } 1288 1289 protected: 1290 USHORT format; /* Format identifier--format = 1 */ 1291 OffsetTo<Coverage> 1292 mark1Coverage; /* Offset to Combining Mark1 Coverage 1293 * table--from beginning of MarkMarkPos 1294 * subtable */ 1295 OffsetTo<Coverage> 1296 mark2Coverage; /* Offset to Combining Mark2 Coverage 1297 * table--from beginning of MarkMarkPos 1298 * subtable */ 1299 USHORT classCount; /* Number of defined mark classes */ 1300 OffsetTo<MarkArray> 1301 mark1Array; /* Offset to Mark1Array table--from 1302 * beginning of MarkMarkPos subtable */ 1303 OffsetTo<Mark2Array> 1304 mark2Array; /* Offset to Mark2Array table--from 1305 * beginning of MarkMarkPos subtable */ 1306 public: 1307 DEFINE_SIZE_STATIC (12); 1308 }; 1309 1310 struct MarkMarkPos 1311 { 1312 template <typename context_t> 1313 inline typename context_t::return_t dispatch (context_t *c) const 1314 { 1315 TRACE_DISPATCH (this); 1316 switch (u.format) { 1317 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 1318 default:return TRACE_RETURN (c->default_return_value ()); 1319 } 1320 } 1321 1322 inline bool sanitize (hb_sanitize_context_t *c) { 1323 TRACE_SANITIZE (this); 1324 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 1325 switch (u.format) { 1326 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 1327 default:return TRACE_RETURN (true); 1328 } 1329 } 1330 1331 protected: 1332 union { 1333 USHORT format; /* Format identifier */ 1334 MarkMarkPosFormat1 format1; 1335 } u; 1336 }; 1337 1338 1339 struct ContextPos : Context {}; 1340 1341 struct ChainContextPos : ChainContext {}; 1342 1343 struct ExtensionPos : Extension<ExtensionPos> 1344 { 1345 typedef struct PosLookupSubTable LookupSubTable; 1346 }; 1347 1348 1349 1350 /* 1351 * PosLookup 1352 */ 1353 1354 1355 struct PosLookupSubTable 1356 { 1357 friend struct PosLookup; 1358 1359 enum Type { 1360 Single = 1, 1361 Pair = 2, 1362 Cursive = 3, 1363 MarkBase = 4, 1364 MarkLig = 5, 1365 MarkMark = 6, 1366 Context = 7, 1367 ChainContext = 8, 1368 Extension = 9 1369 }; 1370 1371 template <typename context_t> 1372 inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const 1373 { 1374 TRACE_DISPATCH (this); 1375 switch (lookup_type) { 1376 case Single: return TRACE_RETURN (u.single.dispatch (c)); 1377 case Pair: return TRACE_RETURN (u.pair.dispatch (c)); 1378 case Cursive: return TRACE_RETURN (u.cursive.dispatch (c)); 1379 case MarkBase: return TRACE_RETURN (u.markBase.dispatch (c)); 1380 case MarkLig: return TRACE_RETURN (u.markLig.dispatch (c)); 1381 case MarkMark: return TRACE_RETURN (u.markMark.dispatch (c)); 1382 case Context: return TRACE_RETURN (u.context.dispatch (c)); 1383 case ChainContext: return TRACE_RETURN (u.chainContext.dispatch (c)); 1384 case Extension: return TRACE_RETURN (u.extension.dispatch (c)); 1385 default: return TRACE_RETURN (c->default_return_value ()); 1386 } 1387 } 1388 1389 inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) { 1390 TRACE_SANITIZE (this); 1391 if (!u.header.sub_format.sanitize (c)) 1392 return TRACE_RETURN (false); 1393 switch (lookup_type) { 1394 case Single: return TRACE_RETURN (u.single.sanitize (c)); 1395 case Pair: return TRACE_RETURN (u.pair.sanitize (c)); 1396 case Cursive: return TRACE_RETURN (u.cursive.sanitize (c)); 1397 case MarkBase: return TRACE_RETURN (u.markBase.sanitize (c)); 1398 case MarkLig: return TRACE_RETURN (u.markLig.sanitize (c)); 1399 case MarkMark: return TRACE_RETURN (u.markMark.sanitize (c)); 1400 case Context: return TRACE_RETURN (u.context.sanitize (c)); 1401 case ChainContext: return TRACE_RETURN (u.chainContext.sanitize (c)); 1402 case Extension: return TRACE_RETURN (u.extension.sanitize (c)); 1403 default: return TRACE_RETURN (true); 1404 } 1405 } 1406 1407 protected: 1408 union { 1409 struct { 1410 USHORT sub_format; 1411 } header; 1412 SinglePos single; 1413 PairPos pair; 1414 CursivePos cursive; 1415 MarkBasePos markBase; 1416 MarkLigPos markLig; 1417 MarkMarkPos markMark; 1418 ContextPos context; 1419 ChainContextPos chainContext; 1420 ExtensionPos extension; 1421 } u; 1422 public: 1423 DEFINE_SIZE_UNION (2, header.sub_format); 1424 }; 1425 1426 1427 struct PosLookup : Lookup 1428 { 1429 inline const PosLookupSubTable& get_subtable (unsigned int i) const 1430 { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; } 1431 1432 inline hb_collect_glyphs_context_t::return_t collect_glyphs_lookup (hb_collect_glyphs_context_t *c) const 1433 { 1434 TRACE_COLLECT_GLYPHS (this); 1435 c->set_recurse_func (NULL); 1436 return TRACE_RETURN (dispatch (c)); 1437 } 1438 1439 template <typename set_t> 1440 inline void add_coverage (set_t *glyphs) const 1441 { 1442 hb_get_coverage_context_t c; 1443 const Coverage *last = NULL; 1444 unsigned int count = get_subtable_count (); 1445 for (unsigned int i = 0; i < count; i++) { 1446 const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ()); 1447 if (coverage != last) { 1448 coverage->add_coverage (glyphs); 1449 last = coverage; 1450 } 1451 } 1452 } 1453 1454 inline bool apply_once (hb_apply_context_t *c) const 1455 { 1456 TRACE_APPLY (this); 1457 if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props)) 1458 return TRACE_RETURN (false); 1459 return TRACE_RETURN (dispatch (c)); 1460 } 1461 1462 static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index); 1463 1464 inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const 1465 { 1466 bool ret = false; 1467 1468 if (unlikely (!c->buffer->len || !c->lookup_mask)) 1469 return false; 1470 1471 c->set_recurse_func (apply_recurse_func); 1472 c->set_lookup (*this); 1473 1474 c->buffer->idx = 0; 1475 1476 while (c->buffer->idx < c->buffer->len) 1477 { 1478 if (digest->may_have (c->buffer->cur().codepoint) && 1479 (c->buffer->cur().mask & c->lookup_mask) && 1480 apply_once (c)) 1481 ret = true; 1482 else 1483 c->buffer->idx++; 1484 } 1485 1486 return ret; 1487 } 1488 1489 template <typename context_t> 1490 static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index); 1491 1492 template <typename context_t> 1493 inline typename context_t::return_t dispatch (context_t *c) const 1494 { 1495 TRACE_DISPATCH (this); 1496 unsigned int lookup_type = get_type (); 1497 unsigned int count = get_subtable_count (); 1498 for (unsigned int i = 0; i < count; i++) { 1499 typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type); 1500 if (c->stop_sublookup_iteration (r)) 1501 return TRACE_RETURN (r); 1502 } 1503 return TRACE_RETURN (c->default_return_value ()); 1504 } 1505 1506 inline bool sanitize (hb_sanitize_context_t *c) { 1507 TRACE_SANITIZE (this); 1508 if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false); 1509 OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable); 1510 return TRACE_RETURN (list.sanitize (c, this, get_type ())); 1511 } 1512 }; 1513 1514 typedef OffsetListOf<PosLookup> PosLookupList; 1515 1516 /* 1517 * GPOS -- The Glyph Positioning Table 1518 */ 1519 1520 struct GPOS : GSUBGPOS 1521 { 1522 static const hb_tag_t Tag = HB_OT_TAG_GPOS; 1523 1524 inline const PosLookup& get_lookup (unsigned int i) const 1525 { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); } 1526 1527 static inline void position_start (hb_font_t *font, hb_buffer_t *buffer); 1528 static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer); 1529 1530 inline bool sanitize (hb_sanitize_context_t *c) { 1531 TRACE_SANITIZE (this); 1532 if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false); 1533 OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList); 1534 return TRACE_RETURN (list.sanitize (c, this)); 1535 } 1536 public: 1537 DEFINE_SIZE_STATIC (10); 1538 }; 1539 1540 1541 static void 1542 fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction) 1543 { 1544 unsigned int j = pos[i].cursive_chain(); 1545 if (likely (!j)) 1546 return; 1547 1548 j += i; 1549 1550 pos[i].cursive_chain() = 0; 1551 1552 fix_cursive_minor_offset (pos, j, direction); 1553 1554 if (HB_DIRECTION_IS_HORIZONTAL (direction)) 1555 pos[i].y_offset += pos[j].y_offset; 1556 else 1557 pos[i].x_offset += pos[j].x_offset; 1558 } 1559 1560 static void 1561 fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction) 1562 { 1563 if (likely (!(pos[i].attach_lookback()))) 1564 return; 1565 1566 unsigned int j = i - pos[i].attach_lookback(); 1567 1568 pos[i].x_offset += pos[j].x_offset; 1569 pos[i].y_offset += pos[j].y_offset; 1570 1571 if (HB_DIRECTION_IS_FORWARD (direction)) 1572 for (unsigned int k = j; k < i; k++) { 1573 pos[i].x_offset -= pos[k].x_advance; 1574 pos[i].y_offset -= pos[k].y_advance; 1575 } 1576 else 1577 for (unsigned int k = j + 1; k < i + 1; k++) { 1578 pos[i].x_offset += pos[k].x_advance; 1579 pos[i].y_offset += pos[k].y_advance; 1580 } 1581 } 1582 1583 void 1584 GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer) 1585 { 1586 buffer->clear_positions (); 1587 1588 unsigned int count = buffer->len; 1589 for (unsigned int i = 0; i < count; i++) 1590 buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0; 1591 } 1592 1593 void 1594 GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer) 1595 { 1596 unsigned int len; 1597 hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len); 1598 hb_direction_t direction = buffer->props.direction; 1599 1600 /* Handle cursive connections */ 1601 for (unsigned int i = 0; i < len; i++) 1602 fix_cursive_minor_offset (pos, i, direction); 1603 1604 /* Handle attachments */ 1605 for (unsigned int i = 0; i < len; i++) 1606 fix_mark_attachment (pos, i, direction); 1607 1608 HB_BUFFER_DEALLOCATE_VAR (buffer, syllable); 1609 HB_BUFFER_DEALLOCATE_VAR (buffer, lig_props); 1610 HB_BUFFER_DEALLOCATE_VAR (buffer, glyph_props); 1611 } 1612 1613 1614 /* Out-of-class implementation for methods recursing */ 1615 1616 template <typename context_t> 1617 inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index) 1618 { 1619 const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos); 1620 const PosLookup &l = gpos.get_lookup (lookup_index); 1621 return l.dispatch (c); 1622 } 1623 1624 inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index) 1625 { 1626 const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos); 1627 const PosLookup &l = gpos.get_lookup (lookup_index); 1628 unsigned int saved_lookup_props = c->lookup_props; 1629 c->set_lookup (l); 1630 bool ret = l.apply_once (c); 1631 c->lookup_props = saved_lookup_props; 1632 return ret; 1633 } 1634 1635 1636 #undef attach_lookback 1637 #undef cursive_chain 1638 1639 1640 } /* namespace OT */ 1641 1642 1643 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */ 1644