1 /* 2 * Copyright 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos-private.hh"


namespace OT {


/* buffer **position** var allocations */
#define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */
#define cursive_chain() var.i16[1] /* character to which this connects, may be positive or negative */


/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef USHORT Value;

/* Open-ended run of 16-bit fields; the actual length of one record is
 * determined at runtime by the accompanying ValueFormat flag word. */
typedef Value ValueRecord[VAR];

/* Flag word describing which optional fields are present in a ValueRecord. */
struct ValueFormat : USHORT
{
  enum Flags {
    xPlacement  = 0x0001,       /* Includes horizontal adjustment for placement */
    yPlacement  = 0x0002,       /* Includes vertical adjustment for placement */
    xAdvance    = 0x0004,       /* Includes horizontal adjustment for advance */
    yAdvance    = 0x0008,       /* Includes vertical adjustment for advance */
    xPlaDevice  = 0x0010,       /* Includes horizontal Device table for placement */
    yPlaDevice  = 0x0020,       /* Includes vertical Device table for placement */
    xAdvDevice  = 0x0040,       /* Includes horizontal Device table for advance */
    yAdvDevice  = 0x0080,       /* Includes vertical Device table for advance */
    ignored     = 0x0F00,       /* Was used in TrueType Open for MM fonts */
    reserved    = 0xF000,       /* For future use */

    devices     = 0x00F0        /* Mask for having any Device table */
  };

/* All fields are options.  Only those available advance the value pointer. */
#if 0
  SHORT         xPlacement;     /* Horizontal adjustment for
                                 * placement--in design units */
  SHORT         yPlacement;     /* Vertical adjustment for
                                 * placement--in design units */
  SHORT         xAdvance;       /* Horizontal adjustment for
                                 * advance--in design units (only used
                                 * for horizontal writing) */
  SHORT         yAdvance;       /* Vertical adjustment for advance--in
                                 * design units (only used for vertical
                                 * writing) */
  Offset        xPlaDevice;     /* Offset to Device table for
                                 * horizontal placement--measured from
                                 * beginning of PosTable (may be NULL) */
  Offset        yPlaDevice;     /* Offset to Device table for vertical
                                 * placement--measured from beginning
                                 * of PosTable (may be NULL) */
  Offset        xAdvDevice;     /* Offset to Device table for
                                 * horizontal advance--measured from
                                 * beginning of PosTable (may be NULL) */
  Offset        yAdvDevice;     /* Offset to Device table for vertical
                                 * advance--measured from beginning of
                                 * PosTable (may be NULL) */
#endif

  /* Number of fields present, i.e. how many Values one record occupies. */
  inline unsigned int get_len (void) const
  { return _hb_popcount32 ((unsigned int) *this); }
  /* Byte size of one record under this format. */
  inline unsigned int get_size (void) const
  { return get_len () * Value::static_size; }

  /* Apply one ValueRecord (starting at 'values') to 'glyph_pos'.
   * 'base' is the start of the subtable from which the Device-table
   * offsets are measured.  Each field present in the format consumes
   * one Value slot, whether or not it is applied for this direction. */
  void apply_value (hb_font_t            *font,
                    hb_direction_t        direction,
                    const void           *base,
                    const Value          *values,
                    hb_glyph_position_t  &glyph_pos) const
  {
    unsigned int x_ppem, y_ppem;
    unsigned int format = *this;
    hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction);

    if (!format) return;

    if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++));
    if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values++)); else values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values++)); else values++;
    }

    if (!has_device ()) return;

    x_ppem = font->x_ppem;
    y_ppem = font->y_ppem;

    /* Device adjustments are ppem-dependent; nothing to do without a ppem. */
    if (!x_ppem && !y_ppem) return;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (x_ppem) glyph_pos.x_offset += (base + get_device (values++)).get_x_delta (font); else values++;
    }
    if (format & yPlaDevice) {
      if (y_ppem) glyph_pos.y_offset += (base + get_device (values++)).get_y_delta (font); else values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values++)).get_x_delta (font); else values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values++)).get_y_delta (font); else values++;
    }
  }

  private:
  /* Sanitize just the Device-table offsets of one record. */
  inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) {
    unsigned int format = *this;

    /* Skip the plain SHORT fields to reach the Device offsets. */
    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance) values++;
    if (format & yAdvance) values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  /* Reinterpret a Value slot as an offset-to-Device / as a signed short. */
  static inline OffsetTo<Device>& get_device (Value* value)
  { return *CastP<OffsetTo<Device> > (value); }
  static inline const OffsetTo<Device>& get_device (const Value* value)
  { return *CastP<OffsetTo<Device> > (value); }

  static inline const SHORT& get_short (const Value* value)
  { return *CastP<SHORT> (value); }

  public:

  inline bool has_device (void) const {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  /* Sanitize a single ValueRecord at 'values'. */
  inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  /* Sanitize 'count' consecutive ValueRecords. */
  inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += len;
    }

    return TRACE_RETURN (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) {
    TRACE_SANITIZE (this);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += stride;       /* caller-supplied stride, in Value units */
    }

    return TRACE_RETURN (true);
  }
};


/* Anchor attachment point, format 1: plain design-unit coordinates. */
struct AnchorFormat1
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  SHORT         xCoordinate;    /* Horizontal value--in design units */
  SHORT         yCoordinate;    /* Vertical value--in design units */
231 public: 232 DEFINE_SIZE_STATIC (6); 233 }; 234 235 struct AnchorFormat2 236 { 237 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id, 238 hb_position_t *x, hb_position_t *y) const 239 { 240 unsigned int x_ppem = font->x_ppem; 241 unsigned int y_ppem = font->y_ppem; 242 hb_position_t cx, cy; 243 hb_bool_t ret = false; 244 245 if (x_ppem || y_ppem) 246 ret = font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy); 247 *x = x_ppem && ret ? cx : font->em_scale_x (xCoordinate); 248 *y = y_ppem && ret ? cy : font->em_scale_y (yCoordinate); 249 } 250 251 inline bool sanitize (hb_sanitize_context_t *c) { 252 TRACE_SANITIZE (this); 253 return TRACE_RETURN (c->check_struct (this)); 254 } 255 256 protected: 257 USHORT format; /* Format identifier--format = 2 */ 258 SHORT xCoordinate; /* Horizontal value--in design units */ 259 SHORT yCoordinate; /* Vertical value--in design units */ 260 USHORT anchorPoint; /* Index to glyph contour point */ 261 public: 262 DEFINE_SIZE_STATIC (8); 263 }; 264 265 struct AnchorFormat3 266 { 267 inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED, 268 hb_position_t *x, hb_position_t *y) const 269 { 270 *x = font->em_scale_x (xCoordinate); 271 *y = font->em_scale_y (yCoordinate); 272 273 if (font->x_ppem) 274 *x += (this+xDeviceTable).get_x_delta (font); 275 if (font->y_ppem) 276 *y += (this+yDeviceTable).get_x_delta (font); 277 } 278 279 inline bool sanitize (hb_sanitize_context_t *c) { 280 TRACE_SANITIZE (this); 281 return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this)); 282 } 283 284 protected: 285 USHORT format; /* Format identifier--format = 3 */ 286 SHORT xCoordinate; /* Horizontal value--in design units */ 287 SHORT yCoordinate; /* Vertical value--in design units */ 288 OffsetTo<Device> 289 xDeviceTable; /* Offset to Device table for X 290 * coordinate-- from beginning of 291 * Anchor table (may be NULL) 
                                 */
  OffsetTo<Device>
                yDeviceTable;   /* Offset to Device table for Y
                                 * coordinate-- from beginning of
                                 * Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};

/* Format-dispatching wrapper over the three Anchor formats. */
struct Anchor
{
  /* Resolve the attachment point; leaves (0,0) for unknown formats. */
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (font, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (font, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (font, glyph_id, x, y); return;
    default: return;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT        format;         /* Format identifier */
  AnchorFormat1 format1;
  AnchorFormat2 format2;
  AnchorFormat3 format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};


/* rows x cols matrix of offsets to Anchor tables; column count is
 * supplied by the owning table at lookup/sanitize time. */
struct AnchorMatrix
{
  /* Fetch the anchor at (row, col); *found reports whether the cell
   * has a non-NULL offset. */
  inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
    *found = !matrix[row * cols + col].is_null ();
    return this+matrix[row * cols + col];
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    /* Guard rows*cols against unsigned overflow. */
    if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrix, matrix[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrix[i].sanitize (c, this)) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  USHORT        rows;           /* Number of rows */
  protected:
  OffsetTo<Anchor>
                matrix[VAR];    /* Matrix of offsets to Anchor tables--
                                 * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrix);
};


/* One mark glyph's class and anchor, inside a MarkArray. */
struct MarkRecord
{
  friend struct MarkArray;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  protected:
  USHORT        klass;          /* Class defined for this mark */
  OffsetTo<Anchor>
                markAnchor;     /* Offset to Anchor table--from
                                 * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */
{
  /* Attach the current mark (mark_index) to the glyph at glyph_pos,
   * using the anchor matrix row for glyph_index and the mark's class. */
  inline bool apply (hb_apply_context_t *c,
                     unsigned int mark_index, unsigned int glyph_index,
                     const AnchorMatrix &anchors, unsigned int class_count,
                     unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it.
     */
    if (unlikely (!found)) return TRACE_RETURN (false);

    hb_position_t mark_x, mark_y, base_x, base_y;

    mark_anchor.get_anchor (c->font, c->buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c->font, c->buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    /* Overlay the two anchor points; record how far back the base is. */
    hb_glyph_position_t &o = c->buffer->cur_pos();
    o.x_offset = base_x - mark_x;
    o.y_offset = base_y - mark_y;
    o.attach_lookback() = c->buffer->idx - glyph_pos;

    c->buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
  }
};


/* Lookups */

/* Single adjustment, format 1: one ValueRecord shared by every
 * covered glyph. */
struct SinglePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             values, c->buffer->cur_pos());

    c->buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of subtable */
  ValueFormat   valueFormat;    /* Defines the types of data in the
                                 * ValueRecord */
  ValueRecord   values;         /* Defines positioning
                                 * value(s)--applied to all glyphs in
                                 * the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};

/* Single adjustment, format 2: one ValueRecord per covered glyph,
 * indexed by Coverage Index. */
struct SinglePosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    /* NOTE(review): 'likely' on this out-of-range early-out looks odd for
     * well-formed fonts; confirm against upstream before changing. */
    if (likely (index >= valueCount)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             &values[index * valueFormat.get_len ()],
                             c->buffer->cur_pos());

    c->buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of subtable */
  ValueFormat   valueFormat;    /* Defines the types of data in the
                                 * ValueRecord */
  USHORT        valueCount;     /* Number of ValueRecords */
  ValueRecord   values;         /* Array of ValueRecords--positioning
                                 * values applied to glyphs */
  public:
  DEFINE_SIZE_ARRAY (8, values);
};

/* Format dispatcher for LookupType 1 (Single adjustment). */
struct SinglePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize
  (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT            format;     /* Format identifier */
  SinglePosFormat1  format1;
  SinglePosFormat2  format2;
  } u;
};


/* Variable-sized record: second glyph of a pair plus the two
 * concatenated ValueRecords.  Only the fixed prefix is declared. */
struct PairValueRecord
{
  friend struct PairSet;

  protected:
  GlyphID       secondGlyph;    /* GlyphID of second glyph in the
                                 * pair--first glyph is listed in the
                                 * Coverage table */
  ValueRecord   values;         /* Positioning data for the first glyph
                                 * followed by for second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};

/* All pairs starting with one particular first glyph. */
struct PairSet
{
  friend struct PairPosFormat1;

  inline void collect_glyphs (hb_collect_glyphs_context_t *c,
                              const ValueFormat *valueFormats) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    /* Record stride: secondGlyph + both ValueRecords, in USHORTs. */
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->input->add (record->secondGlyph);
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }
  }

  /* Scan for a record whose secondGlyph matches the glyph at 'pos';
   * if found, apply both ValueRecords and advance the buffer. */
  inline bool apply (hb_apply_context_t *c,
                     const ValueFormat *valueFormats,
                     unsigned int pos) const
  {
    TRACE_APPLY (this);
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      /* TODO bsearch */
      if (c->buffer->info[pos].codepoint == record->secondGlyph)
      {
        valueFormats[0].apply_value (c->font, c->direction, this,
                                     &record->values[0], c->buffer->cur_pos());
        valueFormats[1].apply_value (c->font, c->direction, this,
                                     &record->values[len1], c->buffer->pos[pos]);
        /* Skip the second glyph too only if it was actually adjusted. */
        if (len2)
          pos++;
        c->buffer->idx = pos;
        return TRACE_RETURN (true);
      }
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }

    return TRACE_RETURN (false);
  }

  /* Pre-computed sizes passed down from PairPosFormat1::sanitize. */
  struct sanitize_closure_t {
    void *base;
    ValueFormat *valueFormats;
    unsigned int len1; /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_array (array, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);

    unsigned int count = len;
    PairValueRecord *record = CastP<PairValueRecord> (array);
    return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
                      && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
  }

  protected:
  USHORT        len;            /* Number of PairValueRecords */
  USHORT        array[VAR];     /* Array of PairValueRecords--ordered
                                 * by GlyphID of the second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, array);
};

/* Pair adjustment, format 1: per-glyph-pair PairSets. */
struct PairPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, &valueFormat1);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool
  apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Pair with the next non-skipped glyph. */
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    PairSet::sanitize_closure_t closure = {
      this,
      &valueFormat1,
      len1,
      1 + len1 + len2
    };

    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of subtable */
  ValueFormat   valueFormat1;   /* Defines the types of data in
                                 * ValueRecord1--for the first glyph
                                 * in the pair--may be zero (0) */
  ValueFormat   valueFormat2;   /* Defines the types of data in
                                 * ValueRecord2--for the second glyph
                                 * in the pair--may be zero (0) */
  OffsetArrayOf<PairSet>
                pairSet;        /* Array of PairSet tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (10, pairSet);
};

/* Pair adjustment, format 2: class-pair matrix of value pairs. */
struct PairPosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    /* (this+coverage).add_coverage (c->input); // Don't need this. */

    /* TODO only add values for pairs that have nonzero adjustments. */

    unsigned int count1 = class1Count;
    const ClassDef &klass1 = this+classDef1;
    for (unsigned int i = 0; i < count1; i++)
      klass1.add_class (c->input, i);

    unsigned int count2 = class2Count;
    const ClassDef &klass2 = this+classDef2;
    for (unsigned int i = 0; i < count2; i++)
      klass2.add_class (c->input, i);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int record_len = len1 + len2;

    /* Classify both glyphs, then index the class1-major matrix. */
    unsigned int klass1 = (this+classDef1).get_class (c->buffer->cur().codepoint);
    unsigned int klass2 = (this+classDef2).get_class (c->buffer->info[skippy_iter.idx].codepoint);
    if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);

    const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
    valueFormat1.apply_value (c->font, c->direction, this,
                              v, c->buffer->cur_pos());
    valueFormat2.apply_value (c->font, c->direction, this,
                              v + len1, c->buffer->pos[skippy_iter.idx]);

    c->buffer->idx = skippy_iter.idx;
    /* Consume the second glyph too only if it was actually adjusted. */
    if (len2)
      c->buffer->idx++;

    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && coverage.sanitize (c, this)
       && classDef1.sanitize (c, this)
       && classDef2.sanitize (c, this))) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int stride = len1 + len2;
    unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
    unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
    return TRACE_RETURN (c->check_array (values, record_size, count) &&
                         valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
                         valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of subtable */
  ValueFormat   valueFormat1;   /* ValueRecord definition--for the
                                 * first glyph of the pair--may be zero
                                 * (0) */
  ValueFormat   valueFormat2;   /* ValueRecord definition--for the
                                 * second glyph of the pair--may be
                                 * zero (0) */
  OffsetTo<ClassDef>
                classDef1;      /* Offset to ClassDef table--from
                                 * beginning of PairPos subtable--for
                                 * the first glyph of the pair */
  OffsetTo<ClassDef>
                classDef2;      /* Offset to ClassDef table--from
                                 * beginning of PairPos subtable--for
                                 * the second glyph of the pair */
  USHORT        class1Count;    /* Number of classes in ClassDef1
                                 * table--includes Class0 */
  USHORT        class2Count;    /* Number of classes in ClassDef2
                                 * table--includes Class0 */
  ValueRecord   values;         /* Matrix of value pairs:
                                 * class1-major, class2-minor,
                                 * Each entry has value1 and value2 */
  public:
  DEFINE_SIZE_ARRAY (16, values);
};

/* Format dispatcher for LookupType 2 (Pair adjustment). */
struct PairPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT          format;       /* Format identifier */
  PairPosFormat1  format1;
  PairPosFormat2  format2;
  } u;
};


/* Entry/exit anchor pair for one glyph in a cursive-attachment lookup. */
struct EntryExitRecord
{
  friend struct CursivePosFormat1;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
  }

  protected:
  OffsetTo<Anchor>
                entryAnchor;    /* Offset to EntryAnchor table--from
                                 * beginning of CursivePos
                                 * subtable--may be NULL */
  OffsetTo<Anchor>
                exitAnchor;     /* Offset to ExitAnchor table--from
                                 * beginning of CursivePos
                                 * subtable--may be NULL */
  public:
  DEFINE_SIZE_STATIC (4);
};

/* Cursive attachment: link each glyph's exit anchor to the next
 * glyph's entry anchor. */
struct CursivePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);

    /* We don't handle mark glyphs here.
*/ 882 if (c->buffer->cur().glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK) return TRACE_RETURN (false); 883 884 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); 885 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); 886 887 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->cur().codepoint)]; 888 if (!this_record.exitAnchor) return TRACE_RETURN (false); 889 890 if (!skippy_iter.next ()) return TRACE_RETURN (false); 891 892 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint)]; 893 if (!next_record.entryAnchor) return TRACE_RETURN (false); 894 895 unsigned int i = c->buffer->idx; 896 unsigned int j = skippy_iter.idx; 897 898 hb_position_t entry_x, entry_y, exit_x, exit_y; 899 (this+this_record.exitAnchor).get_anchor (c->font, c->buffer->info[i].codepoint, &exit_x, &exit_y); 900 (this+next_record.entryAnchor).get_anchor (c->font, c->buffer->info[j].codepoint, &entry_x, &entry_y); 901 902 hb_glyph_position_t *pos = c->buffer->pos; 903 904 hb_position_t d; 905 /* Main-direction adjustment */ 906 switch (c->direction) { 907 case HB_DIRECTION_LTR: 908 pos[i].x_advance = exit_x + pos[i].x_offset; 909 910 d = entry_x + pos[j].x_offset; 911 pos[j].x_advance -= d; 912 pos[j].x_offset -= d; 913 break; 914 case HB_DIRECTION_RTL: 915 d = exit_x + pos[i].x_offset; 916 pos[i].x_advance -= d; 917 pos[i].x_offset -= d; 918 919 pos[j].x_advance = entry_x + pos[j].x_offset; 920 break; 921 case HB_DIRECTION_TTB: 922 pos[i].y_advance = exit_y + pos[i].y_offset; 923 924 d = entry_y + pos[j].y_offset; 925 pos[j].y_advance -= d; 926 pos[j].y_offset -= d; 927 break; 928 case HB_DIRECTION_BTT: 929 d = exit_y + pos[i].y_offset; 930 pos[i].y_advance -= d; 931 pos[i].y_offset -= d; 932 933 pos[j].y_advance = entry_y; 934 break; 935 case HB_DIRECTION_INVALID: 936 default: 937 break; 938 } 939 940 /* Cross-direction adjustment */ 941 
if (c->lookup_props & LookupFlag::RightToLeft) { 942 pos[i].cursive_chain() = j - i; 943 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction))) 944 pos[i].y_offset = entry_y - exit_y; 945 else 946 pos[i].x_offset = entry_x - exit_x; 947 } else { 948 pos[j].cursive_chain() = i - j; 949 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction))) 950 pos[j].y_offset = exit_y - entry_y; 951 else 952 pos[j].x_offset = exit_x - entry_x; 953 } 954 955 c->buffer->idx = j; 956 return TRACE_RETURN (true); 957 } 958 959 inline bool sanitize (hb_sanitize_context_t *c) { 960 TRACE_SANITIZE (this); 961 return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this)); 962 } 963 964 protected: 965 USHORT format; /* Format identifier--format = 1 */ 966 OffsetTo<Coverage> 967 coverage; /* Offset to Coverage table--from 968 * beginning of subtable */ 969 ArrayOf<EntryExitRecord> 970 entryExitRecord; /* Array of EntryExit records--in 971 * Coverage Index order */ 972 public: 973 DEFINE_SIZE_ARRAY (6, entryExitRecord); 974 }; 975 976 struct CursivePos 977 { 978 template <typename context_t> 979 inline typename context_t::return_t dispatch (context_t *c) const 980 { 981 TRACE_DISPATCH (this); 982 switch (u.format) { 983 case 1: return TRACE_RETURN (c->dispatch (u.format1)); 984 default:return TRACE_RETURN (c->default_return_value ()); 985 } 986 } 987 988 inline bool sanitize (hb_sanitize_context_t *c) { 989 TRACE_SANITIZE (this); 990 if (!u.format.sanitize (c)) return TRACE_RETURN (false); 991 switch (u.format) { 992 case 1: return TRACE_RETURN (u.format1.sanitize (c)); 993 default:return TRACE_RETURN (true); 994 } 995 } 996 997 protected: 998 union { 999 USHORT format; /* Format identifier */ 1000 CursivePosFormat1 format1; 1001 } u; 1002 }; 1003 1004 1005 typedef AnchorMatrix BaseArray; /* base-major-- 1006 * in order of BaseCoverage Index--, 1007 * mark-minor-- 1008 * ordered by class--zero-based. 
 */

/* GPOS LookupType 4 (Mark-to-Base), format 1: attaches a covered mark glyph
 * to the nearest preceding base glyph via matched anchor points. */
struct MarkBasePosFormat1
{
  /* Adds every glyph of both coverages to the collector's input set. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+baseCoverage).add_coverage (c->input);
    /* TODO only add combinations that have nonzero adjustment. */
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  /* Positions the current glyph (if covered as a mark) against a preceding
   * base glyph; returns false when no attachment is made. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      if (!skippy_iter.prev ()) return TRACE_RETURN (false);
      /* We only want to attach to the first of a MultipleSubst sequence.  Reject others. */
      if (0 == get_lig_comp (c->buffer->info[skippy_iter.idx])) break;
      skippy_iter.reject ();
    } while (1);

    /* The following assertion is too strong, so we've disabled it.
     * (A non-BASE_GLYPH candidate is deliberately tolerated here.)
 */
    if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH)) {/*return TRACE_RETURN (false);*/}

    unsigned int base_index = (this+baseCoverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

  /* Validates header and referenced subtables; baseArray is checked against
   * classCount columns per row. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
			 markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		markCoverage;		/* Offset to MarkCoverage table--from
					 * beginning of MarkBasePos subtable */
  OffsetTo<Coverage>
		baseCoverage;		/* Offset to BaseCoverage table--from
					 * beginning of MarkBasePos subtable */
  USHORT	classCount;		/* Number of classes defined for marks */
  OffsetTo<MarkArray>
		markArray;		/* Offset to MarkArray table--from
					 * beginning of MarkBasePos subtable */
  OffsetTo<BaseArray>
		baseArray;		/* Offset to BaseArray table--from
					 * beginning of MarkBasePos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

/* Format dispatcher for Mark-to-Base attachment subtables. */
struct MarkBasePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  MarkBasePosFormat1	format1;
  } u;
};


typedef AnchorMatrix LigatureAttach;	/* component-major--
					 * in order of writing direction--,
					 * mark-minor--
					 * ordered by class--zero-based. */

typedef OffsetListOf<LigatureAttach> LigatureArray;
				/* Array of LigatureAttach
				 * tables ordered by
				 * LigatureCoverage Index */

/* GPOS LookupType 5 (Mark-to-Ligature), format 1: attaches a mark to a
 * specific component of a preceding ligature glyph. */
struct MarkLigPosFormat1
{
  /* Adds every glyph of both coverages to the collector's input set. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+ligatureCoverage).add_coverage (c->input);
    /* TODO only add combinations that have nonzero adjustment. */
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  /* Positions the current glyph (if covered as a mark) against a component
   * of the nearest preceding ligature; false when no attachment is made. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    /* The following assertion is too strong, so we've disabled it.
 */
    if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE)) {/*return TRACE_RETURN (false);*/}

    unsigned int j = skippy_iter.idx;
    unsigned int lig_index = (this+ligatureCoverage).get_coverage (c->buffer->info[j].codepoint);
    if (lig_index == NOT_COVERED) return TRACE_RETURN (false);

    const LigatureArray& lig_array = this+ligatureArray;
    const LigatureAttach& lig_attach = lig_array[lig_index];

    /* Find component to attach to */
    unsigned int comp_count = lig_attach.rows;
    if (unlikely (!comp_count)) return TRACE_RETURN (false);

    /* We must now check whether the ligature ID of the current mark glyph
     * is identical to the ligature ID of the found ligature.  If yes, we
     * can directly use the component index.  If not, we attach the mark
     * glyph to the last component of the ligature. */
    unsigned int comp_index;
    unsigned int lig_id = get_lig_id (c->buffer->info[j]);
    unsigned int mark_id = get_lig_id (c->buffer->cur());
    unsigned int mark_comp = get_lig_comp (c->buffer->cur());
    if (lig_id && lig_id == mark_id && mark_comp > 0)
      /* Clamp to the number of components the ligature actually has. */
      comp_index = MIN (comp_count, get_lig_comp (c->buffer->cur())) - 1;
    else
      comp_index = comp_count - 1;

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
  }

  /* Validates header and referenced subtables; ligatureArray rows are checked
   * against classCount columns. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
			 markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		markCoverage;		/* Offset to Mark Coverage table--from
					 * beginning of MarkLigPos subtable */
  OffsetTo<Coverage>
		ligatureCoverage;	/* Offset to Ligature Coverage
					 * table--from beginning of MarkLigPos
					 * subtable */
  USHORT	classCount;		/* Number of defined mark classes */
  OffsetTo<MarkArray>
		markArray;		/* Offset to MarkArray table--from
					 * beginning of MarkLigPos subtable */
  OffsetTo<LigatureArray>
		ligatureArray;		/* Offset to LigatureArray table--from
					 * beginning of MarkLigPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

/* Format dispatcher for Mark-to-Ligature attachment subtables. */
struct MarkLigPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  MarkLigPosFormat1	format1;
  } u;
};


typedef AnchorMatrix Mark2Array;	/* mark2-major--
					 * in order of Mark2Coverage Index--,
					 * mark1-minor--
					 * ordered by class--zero-based. */

/* GPOS LookupType 6 (Mark-to-Mark), format 1: attaches a mark (mark1) to a
 * preceding mark (mark2), subject to ligature-id/component compatibility. */
struct MarkMarkPosFormat1
{
  /* Adds every glyph of both coverages to the collector's input set. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+mark1Coverage).add_coverage (c->input);
    (this+mark2Coverage).add_coverage (c->input);
    /* TODO only add combinations that have nonzero adjustment.
 */
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+mark1Coverage;
  }

  /* Positions the current glyph (if covered as mark1) against a preceding
   * mark2 glyph that belongs to the same base/ligature component. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1);
    /* Clear the ignore flags so the iterator stops at any mark. */
    skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) { return TRACE_RETURN (false); }

    unsigned int j = skippy_iter.idx;

    unsigned int id1 = get_lig_id (c->buffer->cur());
    unsigned int id2 = get_lig_id (c->buffer->info[j]);
    unsigned int comp1 = get_lig_comp (c->buffer->cur());
    unsigned int comp2 = get_lig_comp (c->buffer->info[j]);

    if (likely (id1 == id2)) {
      if (id1 == 0) /* Marks belonging to the same base. */
	goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
	goto good;
    } else {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
	goto good;
    }

    /* Didn't match. */
    return TRACE_RETURN (false);

    good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (c->buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  /* Validates header and referenced subtables; mark2Array rows are checked
   * against classCount columns. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
			 mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
			 && mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		mark1Coverage;		/* Offset to Combining Mark1 Coverage
					 * table--from beginning of MarkMarkPos
					 * subtable */
  OffsetTo<Coverage>
		mark2Coverage;		/* Offset to Combining Mark2 Coverage
					 * table--from beginning of MarkMarkPos
					 * subtable */
  USHORT	classCount;		/* Number of defined mark classes */
  OffsetTo<MarkArray>
		mark1Array;		/* Offset to Mark1Array table--from
					 * beginning of MarkMarkPos subtable */
  OffsetTo<Mark2Array>
		mark2Array;		/* Offset to Mark2Array table--from
					 * beginning of MarkMarkPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

/* Format dispatcher for Mark-to-Mark attachment subtables. */
struct MarkMarkPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  MarkMarkPosFormat1	format1;
  } u;
};


/* LookupTypes 7 and 8 reuse the shared (Chain)Context machinery. */
struct ContextPos : Context {};

struct ChainContextPos : ChainContext {};

/* LookupType 9: Extension positioning — redirects to another subtable type. */
struct ExtensionPos : Extension<ExtensionPos>
{
  typedef struct PosLookupSubTable LookupSubTable;
};



/*
 * PosLookup
 */


/* Union of all GPOS subtable types; the lookup type is stored in the
 * enclosing Lookup, so dispatch/sanitize take it as a parameter. */
struct PosLookupSubTable
{
  friend struct PosLookup;

  enum Type {
    Single		= 1,
    Pair		= 2,
    Cursive		= 3,
    MarkBase		= 4,
    MarkLig		= 5,
    MarkMark		= 6,
    Context		= 7,
    ChainContext	= 8,
    Extension		= 9
  };

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this);
    switch (lookup_type) {
    case Single:		return TRACE_RETURN (u.single.dispatch (c));
    case Pair:			return TRACE_RETURN (u.pair.dispatch (c));
    case Cursive:		return TRACE_RETURN (u.cursive.dispatch (c));
    case MarkBase:		return TRACE_RETURN (u.markBase.dispatch (c));
    case MarkLig:		return TRACE_RETURN (u.markLig.dispatch (c));
    case MarkMark:		return TRACE_RETURN (u.markMark.dispatch (c));
    case Context:		return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext:		return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:		return TRACE_RETURN (u.extension.dispatch (c));
    default:			return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    /* Check the common header first so the per-type sanitizers may assume it. */
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:		return TRACE_RETURN (u.single.sanitize (c));
    case Pair:			return TRACE_RETURN (u.pair.sanitize (c));
    case Cursive:		return TRACE_RETURN (u.cursive.sanitize (c));
    case MarkBase:
				return TRACE_RETURN (u.markBase.sanitize (c));
    case MarkLig:		return TRACE_RETURN (u.markLig.sanitize (c));
    case MarkMark:		return TRACE_RETURN (u.markMark.sanitize (c));
    case Context:		return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:		return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:		return TRACE_RETURN (u.extension.sanitize (c));
    default:			return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT	sub_format;
  } header;
  SinglePos		single;
  PairPos		pair;
  CursivePos		cursive;
  MarkBasePos		markBase;
  MarkLigPos		markLig;
  MarkMarkPos		markMark;
  ContextPos		context;
  ChainContextPos	chainContext;
  ExtensionPos		extension;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};


/* A single GPOS lookup: a typed list of subtables plus application logic. */
struct PosLookup : Lookup
{
  inline const PosLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }

  /* Collects glyphs without recursing into referenced lookups. */
  inline hb_collect_glyphs_context_t::return_t collect_glyphs_lookup (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (NULL);
    return TRACE_RETURN (dispatch (c));
  }

  /* Unions the coverage of every subtable into *glyphs; skips duplicate
   * consecutive Coverage pointers as a cheap de-dup. */
  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
	coverage->add_coverage (glyphs);
	last = coverage;
      }
    }
  }

  /* Applies the lookup at the current buffer position only. */
  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  /* Applies the lookup across the whole buffer; the digest is a bloom-style
   * pre-filter so uncovered glyphs skip the full subtable dispatch.
   * Returns true if any position was applied. */
  inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const
  {
    bool ret = false;

    if (unlikely (!c->buffer->len || !c->lookup_mask))
      return false;

    c->set_recurse_func (apply_recurse_func);
    c->set_lookup (*this);

    c->buffer->idx = 0;

    while (c->buffer->idx < c->buffer->len)
    {
      /* On success, apply() has already advanced idx past what it consumed. */
      if ((c->buffer->cur().mask & c->lookup_mask) &&
	  digest->may_have (c->buffer->cur().codepoint) &&
	  apply_once (c))
	ret = true;
      else
	c->buffer->idx++;
    }

    return ret;
  }

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  /* Runs the context over each subtable until it asks to stop. */
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
	return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable);
    return TRACE_RETURN (list.sanitize (c, this, get_type ()));
  }
};

typedef OffsetListOf<PosLookup> PosLookupList;

/*
 * GPOS -- The Glyph Positioning Table
 */

struct GPOS : GSUBGPOS
{
  static const hb_tag_t Tag	= HB_OT_TAG_GPOS;

  inline const PosLookup& get_lookup (unsigned int i) const
  { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }

  /* Buffer-wide setup/teardown around GPOS application; defined below. */
  static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};


/* Resolves a chain of cursive connections: recursively fixes the glyph this
 * one chains to, then adds that glyph's cross-direction offset to ours.
 * cursive_chain() stores a relative index and is cleared before recursing,
 * which also guards against cycles. */
static void
fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  unsigned int j = pos[i].cursive_chain();
  if (likely (!j))
    return;

  j += i;

  pos[i].cursive_chain() = 0;

  fix_cursive_minor_offset (pos, j, direction);

  if (HB_DIRECTION_IS_HORIZONTAL (direction))
    pos[i].y_offset += pos[j].y_offset;
  else
    pos[i].x_offset += pos[j].x_offset;
}

/* Converts a mark's anchor-relative offset into a final offset by folding in
 * its attachment root's offset and the advances of the glyphs in between
 * (subtracted in forward direction, added in backward direction). */
static void
fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  if (likely (!(pos[i].attach_lookback())))
    return;

  unsigned int j = i - pos[i].attach_lookback();

  pos[i].x_offset += pos[j].x_offset;
  pos[i].y_offset += pos[j].y_offset;

  if (HB_DIRECTION_IS_FORWARD (direction))
    for (unsigned int k = j; k < i; k++) {
      pos[i].x_offset -= pos[k].x_advance;
      pos[i].y_offset -= pos[k].y_advance;
    }
  else
    for (unsigned int k = j + 1; k < i + 1; k++) {
      pos[i].x_offset += pos[k].x_advance;
      pos[i].y_offset += pos[k].y_advance;
    }
}

/* Resets positions and zeroes the per-glyph attachment bookkeeping vars. */
void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  buffer->clear_positions ();

  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0;
}

/* Resolves all deferred attachments (cursive chains first, then mark
 * attachments) and releases the buffer vars used during shaping. */
void
GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle cursive connections */
  for (unsigned int i = 0; i < len; i++)
    fix_cursive_minor_offset (pos, i, direction);

  /* Handle attachments */
  for (unsigned int i = 0; i < len; i++)
    fix_mark_attachment (pos, i, direction);

  HB_BUFFER_DEALLOCATE_VAR (buffer, syllable);
  HB_BUFFER_DEALLOCATE_VAR (buffer, lig_props);
  HB_BUFFER_DEALLOCATE_VAR (buffer, glyph_props);
}


/* Out-of-class implementation for methods recursing */

/* Recurses a dispatch context into a nested lookup by index. */
template <typename context_t>
inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  return l.dispatch (c);
}

/* Applies a nested lookup, restoring the caller's lookup_props afterwards. */
inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  return ret;
}


#undef attach_lookback
#undef cursive_chain


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */