/*
 * Copyright 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright 2010,2012,2013  Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos-private.hh"


namespace OT {


/* buffer **position** var allocations */
#define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */
#define cursive_chain() var.i16[1] /* glyph to which this connects; may be positive or negative */


/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef USHORT Value;

typedef Value ValueRecord[VAR];

struct ValueFormat : USHORT
{
  enum Flags {
    xPlacement = 0x0001, /* Includes horizontal adjustment for placement */
    yPlacement = 0x0002, /* Includes vertical adjustment for placement */
    xAdvance   = 0x0004, /* Includes horizontal adjustment for advance */
    yAdvance   = 0x0008, /* Includes vertical adjustment for advance */
    xPlaDevice = 0x0010, /* Includes horizontal Device table for placement */
    yPlaDevice = 0x0020, /* Includes vertical Device table for placement */
    xAdvDevice = 0x0040, /* Includes horizontal Device table for advance */
    yAdvDevice = 0x0080, /* Includes vertical Device table for advance */
    ignored    = 0x0F00, /* Was used in TrueType Open for MM fonts */
    reserved   = 0xF000, /* For future use */

    devices    = 0x00F0  /* Mask for having any Device table */
  };

  /* All fields are optional.  Only those present in the format advance the value pointer. */
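  /* For illustration: a format of 0x0005 (xPlacement | xAdvance) means each
   * ValueRecord packs exactly two values, in flag order: the x placement first,
   * then the x advance.  get_len () is simply the popcount of the format word,
   * and get_size () is that many USHORTs. */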
#if 0
  SHORT   xPlacement; /* Horizontal adjustment for placement--in design units */
  SHORT   yPlacement; /* Vertical adjustment for placement--in design units */
  SHORT   xAdvance;   /* Horizontal adjustment for advance--in design units
                       * (only used for horizontal writing) */
  SHORT   yAdvance;   /* Vertical adjustment for advance--in design units
                       * (only used for vertical writing) */
  Offset  xPlaDevice; /* Offset to Device table for horizontal placement--
                       * measured from beginning of PosTable (may be NULL) */
  Offset  yPlaDevice; /* Offset to Device table for vertical placement--
                       * measured from beginning of PosTable (may be NULL) */
  Offset  xAdvDevice; /* Offset to Device table for horizontal advance--
                       * measured from beginning of PosTable (may be NULL) */
  Offset  yAdvDevice; /* Offset to Device table for vertical advance--
                       * measured from beginning of PosTable (may be NULL) */
#endif

  inline unsigned int get_len (void) const
  { return _hb_popcount32 ((unsigned int) *this); }
  inline unsigned int get_size (void) const
  { return get_len () * Value::static_size; }

  void apply_value (hb_font_t            *font,
                    hb_direction_t        direction,
                    const void           *base,
                    const Value          *values,
                    hb_glyph_position_t  &glyph_pos) const
  {
    unsigned int x_ppem, y_ppem;
    unsigned int format = *this;
    hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction);

    if (!format) return;

    if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++));
    if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values++)); else values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values++)); else values++;
    }

    if (!has_device ()) return;

    x_ppem = font->x_ppem;
    y_ppem = font->y_ppem;

    if (!x_ppem && !y_ppem) return;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (x_ppem) glyph_pos.x_offset += (base + get_device (values++)).get_x_delta (font); else values++;
    }
    if (format & yPlaDevice) {
      if (y_ppem) glyph_pos.y_offset += (base + get_device (values++)).get_y_delta (font); else values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values++)).get_x_delta (font); else values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values++)).get_y_delta (font); else values++;
    }
  }

  private:
  inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  static inline OffsetTo<Device>& get_device (Value* value)
  { return *CastP<OffsetTo<Device> > (value); }
  static inline const OffsetTo<Device>& get_device (const Value* value)
  { return *CastP<OffsetTo<Device> > (value); }

  static inline const SHORT& get_short (const Value* value)
  { return *CastP<SHORT> (value); }

  public:

  inline bool has_device (void) const {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += len;
    }

    return TRACE_RETURN (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) {
    TRACE_SANITIZE (this);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += stride;
    }

    return TRACE_RETURN (true);
  }
};


struct AnchorFormat1
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT  format;      /* Format identifier--format = 1 */
  SHORT   xCoordinate; /* Horizontal value--in design units */
  SHORT   yCoordinate; /* Vertical value--in design units */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct AnchorFormat2
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx, cy;
    hb_bool_t ret = false;

    if (x_ppem || y_ppem)
      ret = font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = x_ppem && ret ? cx : font->em_scale_x (xCoordinate);
    *y = y_ppem && ret ? cy : font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT  format;      /* Format identifier--format = 2 */
  SHORT   xCoordinate; /* Horizontal value--in design units */
  SHORT   yCoordinate; /* Vertical value--in design units */
  USHORT  anchorPoint; /* Index to glyph contour point */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct AnchorFormat3
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);

    if (font->x_ppem)
      *x += (this+xDeviceTable).get_x_delta (font);
    if (font->y_ppem)
      *y += (this+yDeviceTable).get_y_delta (font);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  protected:
  USHORT  format;      /* Format identifier--format = 3 */
  SHORT   xCoordinate; /* Horizontal value--in design units */
  SHORT   yCoordinate; /* Vertical value--in design units */
  OffsetTo<Device>
          xDeviceTable; /* Offset to Device table for X coordinate--from
                         * beginning of Anchor table (may be NULL) */
  OffsetTo<Device>
          yDeviceTable; /* Offset to Device table for Y coordinate--from
                         * beginning of Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};

struct Anchor
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (font, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (font, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (font, glyph_id, x, y); return;
    default: return;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT        format; /* Format identifier */
  AnchorFormat1 format1;
  AnchorFormat2 format2;
  AnchorFormat3 format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};


struct AnchorMatrix
{
  inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
    *found = !matrix[row * cols + col].is_null ();
    return this+matrix[row * cols + col];
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrix, matrix[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrix[i].sanitize (c, this)) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }
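
  /* Layout note: the matrix is stored row-major with `cols` Anchor offsets per
   * row, so get_anchor () indexes it as row * cols + col.  The column count is
   * not stored in the table itself; each user of AnchorMatrix passes it in
   * (classCount in the Mark*Pos subtables below). */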

  USHORT  rows;         /* Number of rows */
  protected:
  OffsetTo<Anchor>
          matrix[VAR];  /* Matrix of offsets to Anchor tables--
                         * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrix);
};


struct MarkRecord
{
  friend struct MarkArray;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  protected:
  USHORT  klass;        /* Class defined for this mark */
  OffsetTo<Anchor>
          markAnchor;   /* Offset to Anchor table--from
                         * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */
{
  inline bool apply (hb_apply_context_t *c,
                     unsigned int mark_index, unsigned int glyph_index,
                     const AnchorMatrix &anchors, unsigned int class_count,
                     unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return TRACE_RETURN (false);

    hb_position_t mark_x, mark_y, base_x, base_y;

    mark_anchor.get_anchor (c->font, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c->font, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = base_x - mark_x;
    o.y_offset = base_y - mark_y;
    o.attach_lookback() = buffer->idx - glyph_pos;

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
  }
};


/* Lookups */

struct SinglePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             values, buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values));
  }

  protected:
  USHORT      format;      /* Format identifier--format = 1 */
  OffsetTo<Coverage>
              coverage;    /* Offset to Coverage table--from beginning of subtable */
  ValueFormat valueFormat; /* Defines the types of data in the ValueRecord */
  ValueRecord values;      /* Defines positioning value(s)--applied to
                            * all glyphs in the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};
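
/* Note on the two SinglePos formats: Format 1 applies one shared ValueRecord to
 * every glyph in its Coverage table, while Format 2 (below) stores one
 * ValueRecord per covered glyph, indexed by Coverage index.  Both are driven by
 * the same ValueFormat machinery above. */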

struct SinglePosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (unlikely (index >= valueCount)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             &values[index * valueFormat.get_len ()],
                             buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount));
  }

  protected:
  USHORT      format;      /* Format identifier--format = 2 */
  OffsetTo<Coverage>
              coverage;    /* Offset to Coverage table--from beginning of subtable */
  ValueFormat valueFormat; /* Defines the types of data in the ValueRecord */
  USHORT      valueCount;  /* Number of ValueRecords */
  ValueRecord values;      /* Array of ValueRecords--positioning values applied to glyphs */
  public:
  DEFINE_SIZE_ARRAY (8, values);
};

struct SinglePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT           format;  /* Format identifier */
  SinglePosFormat1 format1;
  SinglePosFormat2 format2;
  } u;
};


struct PairValueRecord
{
  friend struct PairSet;

  protected:
  GlyphID     secondGlyph; /* GlyphID of second glyph in the pair--first glyph
                            * is listed in the Coverage table */
  ValueRecord values;      /* Positioning data for the first glyph,
                            * followed by data for the second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};

struct PairSet
{
  friend struct PairPosFormat1;

  inline void collect_glyphs (hb_collect_glyphs_context_t *c,
                              const ValueFormat *valueFormats) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->input->add (record->secondGlyph);
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }
  }

  inline bool apply (hb_apply_context_t *c,
                     const ValueFormat *valueFormats,
                     unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      /* TODO bsearch */
      if (buffer->info[pos].codepoint == record->secondGlyph)
      {
        valueFormats[0].apply_value (c->font, c->direction, this,
                                     &record->values[0], buffer->cur_pos());
        valueFormats[1].apply_value (c->font, c->direction, this,
                                     &record->values[len1], buffer->pos[pos]);
        if (len2)
          pos++;
        buffer->idx = pos;
        return TRACE_RETURN (true);
      }
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }

    return TRACE_RETURN (false);
  }

  struct sanitize_closure_t {
    void *base;
    ValueFormat *valueFormats;
    unsigned int len1;   /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_array (array, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);

    unsigned int count = len;
    PairValueRecord *record = CastP<PairValueRecord> (array);
    return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
                      && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
  }

  protected:
  USHORT  len;        /* Number of PairValueRecords */
  USHORT  array[VAR]; /* Array of PairValueRecords--ordered by GlyphID of the second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, array);
};

struct PairPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, &valueFormat1);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    PairSet::sanitize_closure_t closure = {
      this,
      &valueFormat1,
      len1,
      1 + len1 + len2
    };

    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  protected:
  USHORT      format;       /* Format identifier--format = 1 */
  OffsetTo<Coverage>
              coverage;     /* Offset to Coverage table--from beginning of subtable */
  ValueFormat valueFormat1; /* Defines the types of data in ValueRecord1--for the
                             * first glyph in the pair--may be zero (0) */
  ValueFormat valueFormat2; /* Defines the types of data in ValueRecord2--for the
                             * second glyph in the pair--may be zero (0) */
  OffsetArrayOf<PairSet>
              pairSet;      /* Array of PairSet tables ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (10, pairSet);
};

struct PairPosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    /* (this+coverage).add_coverage (c->input); // Don't need this. */

    unsigned int count1 = class1Count;
    const ClassDef &klass1 = this+classDef1;
    for (unsigned int i = 0; i < count1; i++)
      klass1.add_class (c->input, i);

    unsigned int count2 = class2Count;
    const ClassDef &klass2 = this+classDef2;
    for (unsigned int i = 0; i < count2; i++)
      klass2.add_class (c->input, i);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int record_len = len1 + len2;

    unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
    unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
    if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);

    const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
    valueFormat1.apply_value (c->font, c->direction, this,
                              v, buffer->cur_pos());
    valueFormat2.apply_value (c->font, c->direction, this,
                              v + len1, buffer->pos[skippy_iter.idx]);

    buffer->idx = skippy_iter.idx;
    if (len2)
      buffer->idx++;

    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && coverage.sanitize (c, this)
       && classDef1.sanitize (c, this)
       && classDef2.sanitize (c, this))) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int stride = len1 + len2;
    unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
    unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
    return TRACE_RETURN (c->check_array (values, record_size, count) &&
                         valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
                         valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
  }

  protected:
  USHORT      format;       /* Format identifier--format = 2 */
  OffsetTo<Coverage>
              coverage;     /* Offset to Coverage table--from beginning of subtable */
  ValueFormat valueFormat1; /* ValueRecord definition--for the first glyph of the
                             * pair--may be zero (0) */
  ValueFormat valueFormat2; /* ValueRecord definition--for the second glyph of the
                             * pair--may be zero (0) */
  OffsetTo<ClassDef>
              classDef1;    /* Offset to ClassDef table--from beginning of PairPos
                             * subtable--for the first glyph of the pair */
  OffsetTo<ClassDef>
              classDef2;    /* Offset to ClassDef table--from beginning of PairPos
                             * subtable--for the second glyph of the pair */
  USHORT      class1Count;  /* Number of classes in ClassDef1 table--includes Class0 */
  USHORT      class2Count;  /* Number of classes in ClassDef2 table--includes Class0 */
  ValueRecord values;       /* Matrix of value pairs: class1-major, class2-minor;
                             * each entry has value1 and value2 */
  public:
  DEFINE_SIZE_ARRAY (16, values);
};

struct PairPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT         format; /* Format identifier */
  PairPosFormat1 format1;
  PairPosFormat2 format2;
  } u;
};


struct EntryExitRecord
{
  friend struct CursivePosFormat1;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
  }

  protected:
  OffsetTo<Anchor>
        entryAnchor; /* Offset to EntryAnchor table--from beginning of
                      * CursivePos subtable--may be NULL */
  OffsetTo<Anchor>
        exitAnchor;  /* Offset to ExitAnchor table--from beginning of
                      * CursivePos subtable--may be NULL */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct CursivePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;

    /* We don't handle mark glyphs here. */
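    /* How this lookup works, in brief: the exit anchor of the current glyph is
     * aligned with the entry anchor of the next (non-skipped) glyph, adjusting
     * advances in the main direction and offsets in the cross direction; the
     * cursive_chain() links recorded below are resolved later by
     * fix_cursive_minor_offset() at position_finish() time. */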
    if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false);

    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
    if (!this_record.exitAnchor) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
    if (!next_record.entryAnchor) return TRACE_RETURN (false);

    unsigned int i = buffer->idx;
    unsigned int j = skippy_iter.idx;

    hb_position_t entry_x, entry_y, exit_x, exit_y;
    (this+this_record.exitAnchor).get_anchor (c->font, buffer->info[i].codepoint, &exit_x, &exit_y);
    (this+next_record.entryAnchor).get_anchor (c->font, buffer->info[j].codepoint, &entry_x, &entry_y);

    hb_glyph_position_t *pos = buffer->pos;

    hb_position_t d;
    /* Main-direction adjustment */
    switch (c->direction) {
    case HB_DIRECTION_LTR:
      pos[i].x_advance  = exit_x + pos[i].x_offset;

      d = entry_x + pos[j].x_offset;
      pos[j].x_advance -= d;
      pos[j].x_offset  -= d;
      break;
    case HB_DIRECTION_RTL:
      d = exit_x + pos[i].x_offset;
      pos[i].x_advance -= d;
      pos[i].x_offset  -= d;

      pos[j].x_advance  = entry_x + pos[j].x_offset;
      break;
    case HB_DIRECTION_TTB:
      pos[i].y_advance  = exit_y + pos[i].y_offset;

      d = entry_y + pos[j].y_offset;
      pos[j].y_advance -= d;
      pos[j].y_offset  -= d;
      break;
    case HB_DIRECTION_BTT:
      d = exit_y + pos[i].y_offset;
      pos[i].y_advance -= d;
      pos[i].y_offset  -= d;

      pos[j].y_advance  = entry_y;
      break;
    case HB_DIRECTION_INVALID:
    default:
      break;
    }

    /* Cross-direction adjustment */
    if (c->lookup_props & LookupFlag::RightToLeft) {
      pos[i].cursive_chain() = j - i;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[i].y_offset = entry_y - exit_y;
      else
        pos[i].x_offset = entry_x - exit_x;
    } else {
      pos[j].cursive_chain() = i - j;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[j].y_offset = exit_y - entry_y;
      else
        pos[j].x_offset = exit_x - entry_x;
    }

    buffer->idx = j;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
  }

  protected:
  USHORT  format;   /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          coverage; /* Offset to Coverage table--from beginning of subtable */
  ArrayOf<EntryExitRecord>
          entryExitRecord; /* Array of EntryExit records--in Coverage Index order */
  public:
  DEFINE_SIZE_ARRAY (6, entryExitRecord);
};

struct CursivePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT            format; /* Format identifier */
  CursivePosFormat1 format1;
  } u;
};


typedef AnchorMatrix BaseArray; /* base-major--
                                 * in order of BaseCoverage Index--,
                                 * mark-minor--
                                 * ordered by class--zero-based. */

struct MarkBasePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+baseCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      if (!skippy_iter.prev ()) return TRACE_RETURN (false);
      /* We only want to attach to the first of a MultipleSubst sequence.  Reject others. */
      if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
      skippy_iter.reject ();
    } while (1);

    /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT  format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          markCoverage;  /* Offset to MarkCoverage table--from beginning of MarkBasePos subtable */
  OffsetTo<Coverage>
          baseCoverage;  /* Offset to BaseCoverage table--from beginning of MarkBasePos subtable */
  USHORT  classCount;    /* Number of classes defined for marks */
  OffsetTo<MarkArray>
          markArray;     /* Offset to MarkArray table--from beginning of MarkBasePos subtable */
  OffsetTo<BaseArray>
          baseArray;     /* Offset to BaseArray table--from beginning of MarkBasePos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkBasePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT             format; /* Format identifier */
  MarkBasePosFormat1 format1;
  } u;
};


typedef AnchorMatrix LigatureAttach; /* component-major--
                                      * in order of writing direction--,
                                      * mark-minor--
                                      * ordered by class--zero-based. */

typedef OffsetListOf<LigatureAttach> LigatureArray;
                                     /* Array of LigatureAttach
                                      * tables ordered by
                                      * LigatureCoverage Index */

struct MarkLigPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+ligatureCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int j = skippy_iter.idx;
    unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
    if (lig_index == NOT_COVERED) return TRACE_RETURN (false);

    const LigatureArray& lig_array = this+ligatureArray;
    const LigatureAttach& lig_attach = lig_array[lig_index];

    /* Find component to attach to */
    unsigned int comp_count = lig_attach.rows;
    if (unlikely (!comp_count)) return TRACE_RETURN (false);

    /* We must now check whether the ligature ID of the current mark glyph
     * is identical to the ligature ID of the found ligature.  If yes, we
     * can directly use the component index.  If not, we attach the mark
     * glyph to the last component of the ligature. */
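    /* Illustrative example (hypothetical glyphs): for a lam-alef ligature
     * carrying a shadda, the mark keeps the ligature ID and component number it
     * was tagged with during GSUB ligation, so it attaches to the component it
     * originally followed; a mark typed after the whole ligature has no matching
     * ID and falls back to the last component. */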
    unsigned int comp_index;
    unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
    if (lig_id && lig_id == mark_id && mark_comp > 0)
      comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
    else
      comp_index = comp_count - 1;

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT  format;           /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          markCoverage;     /* Offset to Mark Coverage table--from beginning of MarkLigPos subtable */
  OffsetTo<Coverage>
          ligatureCoverage; /* Offset to Ligature Coverage table--from beginning of MarkLigPos subtable */
  USHORT  classCount;       /* Number of defined mark classes */
  OffsetTo<MarkArray>
          markArray;        /* Offset to MarkArray table--from beginning of MarkLigPos subtable */
  OffsetTo<LigatureArray>
          ligatureArray;    /* Offset to LigatureArray table--from beginning of MarkLigPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkLigPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT            format; /* Format identifier */
  MarkLigPosFormat1 format1;
  } u;
};


typedef AnchorMatrix Mark2Array; /* mark2-major--
                                  * in order of Mark2Coverage Index--,
                                  * mark1-minor--
                                  * ordered by class--zero-based. */
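
/* MarkMarkPos attaches one mark (mark1) to another mark (mark2), e.g. stacking a
 * second diacritic above a first one.  Like MarkBasePos, it uses a MarkArray for
 * mark1 and an AnchorMatrix (Mark2Array) indexed by mark2 Coverage index and
 * mark1 class. */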

struct MarkMarkPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+mark1Coverage).add_coverage (c->input);
    (this+mark2Coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+mark1Coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); }

    unsigned int j = skippy_iter.idx;

    unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
    unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);

    if (likely (id1 == id2)) {
      if (id1 == 0) /* Marks belonging to the same base. */
        goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
        goto good;
    } else {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
        goto good;
    }

    /* Didn't match. */
    return TRACE_RETURN (false);

  good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
                         mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
                         && mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT  format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          mark1Coverage; /* Offset to Combining Mark1 Coverage table--from
                          * beginning of MarkMarkPos subtable */
  OffsetTo<Coverage>
          mark2Coverage; /* Offset to Combining Mark2 Coverage table--from
                          * beginning of MarkMarkPos subtable */
  USHORT  classCount;    /* Number of defined mark classes */
  OffsetTo<MarkArray>
          mark1Array;    /* Offset to Mark1Array table--from beginning of MarkMarkPos subtable */
  OffsetTo<Mark2Array>
          mark2Array;    /* Offset to Mark2Array table--from beginning of MarkMarkPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkMarkPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT             format; /* Format identifier */
  MarkMarkPosFormat1 format1;
  } u;
};


struct ContextPos : Context {};

struct ChainContextPos : ChainContext {};

struct ExtensionPos : Extension<ExtensionPos>
{
  typedef struct PosLookupSubTable LookupSubTable;
};



/*
 * PosLookup
 */


struct PosLookupSubTable
{
  friend struct PosLookup;

  enum Type {
    Single        = 1,
    Pair          = 2,
    Cursive       = 3,
    MarkBase      = 4,
    MarkLig       = 5,
    MarkMark      = 6,
    Context       = 7,
    ChainContext  = 8,
    Extension     = 9
  };

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this);
    switch (lookup_type) {
    case Single:       return TRACE_RETURN (u.single.dispatch (c));
    case Pair:         return TRACE_RETURN (u.pair.dispatch (c));
    case Cursive:      return TRACE_RETURN (u.cursive.dispatch (c));
    case MarkBase:     return TRACE_RETURN (u.markBase.dispatch (c));
    case MarkLig:      return TRACE_RETURN (u.markLig.dispatch (c));
    case MarkMark:     return TRACE_RETURN (u.markMark.dispatch (c));
    case Context:      return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext: return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:    return TRACE_RETURN (u.extension.dispatch (c));
    default:           return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:       return TRACE_RETURN (u.single.sanitize (c));
    case Pair:         return TRACE_RETURN (u.pair.sanitize (c));
    case Cursive:      return TRACE_RETURN (u.cursive.sanitize (c));
    case MarkBase:     return TRACE_RETURN (u.markBase.sanitize (c));
    case MarkLig:      return TRACE_RETURN (u.markLig.sanitize (c));
    case MarkMark:     return TRACE_RETURN (u.markMark.sanitize (c));
    case Context:      return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext: return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:    return TRACE_RETURN (u.extension.sanitize (c));
    default:           return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT sub_format;
  } header;
  SinglePos       single;
  PairPos         pair;
  CursivePos      cursive;
  MarkBasePos     markBase;
  MarkLigPos      markLig;
  MarkMarkPos     markMark;
  ContextPos      context;
  ChainContextPos chainContext;
  ExtensionPos    extension;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};


struct PosLookup : Lookup
{
  inline const PosLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }

  inline bool is_reverse (void) const
  {
    return false;
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (NULL);
    return TRACE_RETURN (dispatch (c));
  }

  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable);
    return TRACE_RETURN (list.sanitize (c, this, get_type ()));
  }
};

typedef OffsetListOf<PosLookup> PosLookupList;

/*
 * GPOS -- The Glyph Positioning Table
 */

struct GPOS : GSUBGPOS
{
  static const hb_tag_t tableTag = HB_OT_TAG_GPOS;

  inline const PosLookup& get_lookup (unsigned int i) const
  { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }

  static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};


static void
fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  unsigned int j = pos[i].cursive_chain();
  if (likely (!j))
    return;

  j += i;

  pos[i].cursive_chain() = 0;

  fix_cursive_minor_offset (pos, j, direction);

  if (HB_DIRECTION_IS_HORIZONTAL (direction))
    pos[i].y_offset += pos[j].y_offset;
  else
    pos[i].x_offset += pos[j].x_offset;
}

static void
fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  if (likely (!(pos[i].attach_lookback())))
    return;

  unsigned int j = i - pos[i].attach_lookback();

  pos[i].x_offset += pos[j].x_offset;
  pos[i].y_offset += pos[j].y_offset;

  if (HB_DIRECTION_IS_FORWARD (direction))
    for (unsigned int k = j; k < i; k++) {
      pos[i].x_offset -= pos[k].x_advance;
      pos[i].y_offset -= pos[k].y_advance;
    }
  else
    for (unsigned int k = j + 1; k < i + 1; k++) {
      pos[i].x_offset += pos[k].x_advance;
      pos[i].y_offset += pos[k].y_advance;
    }
}

void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  buffer->clear_positions ();

  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0;
}

void
GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle cursive connections */
  for (unsigned int i = 0; i < len; i++)
    fix_cursive_minor_offset (pos, i, direction);

  /* Handle attachments */
  for (unsigned int i = 0; i < len; i++)
    fix_mark_attachment (pos, i, direction);

  _hb_buffer_deallocate_gsubgpos_vars (buffer);
}


/* Out-of-class implementation for methods recursing */

template <typename context_t>
inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  return l.dispatch (c);
}

inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  return ret;
}


#undef attach_lookback
#undef cursive_chain


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */