      1 /*
      2  * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
      3  * Copyright © 2012  Google, Inc.
      4  *
      5  *  This is part of HarfBuzz, a text shaping library.
      6  *
      7  * Permission is hereby granted, without written agreement and without
      8  * license or royalty fees, to use, copy, modify, and distribute this
      9  * software and its documentation for any purpose, provided that the
     10  * above copyright notice and the following two paragraphs appear in
     11  * all copies of this software.
     12  *
     13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
     14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
     15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
     16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
     17  * DAMAGE.
     18  *
     19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
     20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
     21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
     22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
     23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
     24  *
     25  * Red Hat Author(s): Behdad Esfahbod
     26  * Google Author(s): Behdad Esfahbod
     27  */
     28 
     29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
     30 #define HB_OPEN_TYPE_PRIVATE_HH
     31 
     32 #include "hb-private.hh"
     33 #include "hb-face-private.hh"
     34 
     35 
     36 namespace OT {
     37 
     38 
     39 
     40 /*
     41  * Casts
     42  */
     43 
     44 /* Cast to struct T, reference to reference */
     45 template<typename Type, typename TObject>
     46 static inline const Type& CastR(const TObject &X)
     47 { return reinterpret_cast<const Type&> (X); }
     48 template<typename Type, typename TObject>
     49 static inline Type& CastR(TObject &X)
     50 { return reinterpret_cast<Type&> (X); }
     51 
     52 /* Cast to struct T, pointer to pointer */
     53 template<typename Type, typename TObject>
     54 static inline const Type* CastP(const TObject *X)
     55 { return reinterpret_cast<const Type*> (X); }
     56 template<typename Type, typename TObject>
     57 static inline Type* CastP(TObject *X)
     58 { return reinterpret_cast<Type*> (X); }
     59 
      60 /* StructAtOffset<T>(P,Ofs) returns the struct T& located at the memory
      61  * address P plus Ofs bytes. */
     62 template<typename Type>
     63 static inline const Type& StructAtOffset(const void *P, unsigned int offset)
     64 { return * reinterpret_cast<const Type*> ((const char *) P + offset); }
     65 template<typename Type>
     66 static inline Type& StructAtOffset(void *P, unsigned int offset)
     67 { return * reinterpret_cast<Type*> ((char *) P + offset); }
     68 
      69 /* StructAfter<T>(X) returns the struct T& that is placed right after X.
      70  * Also works when X is of variable size; X must implement get_size(). */
     71 template<typename Type, typename TObject>
     72 static inline const Type& StructAfter(const TObject &X)
     73 { return StructAtOffset<Type>(&X, X.get_size()); }
     74 template<typename Type, typename TObject>
     75 static inline Type& StructAfter(TObject &X)
     76 { return StructAtOffset<Type>(&X, X.get_size()); }
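        /* Usage sketch (illustrative only; "TableRecord", "base" and "byteOffset" are
         * hypothetical names, not defined in this header):
         *
         *   const TableRecord &rec  = StructAtOffset<TableRecord> (base, byteOffset);
         *   const TableRecord &next = StructAfter<TableRecord> (rec);
         *
         * StructAfter relies on rec.get_size () to find where the following struct
         * starts, so it also works for variable-sized records. */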
     77 
     78 
     79 
     80 /*
     81  * Size checking
     82  */
     83 
     84 /* Check _assertion in a method environment */
     85 #define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
     86   inline void _instance_assertion_on_line_##_line (void) const \
     87   { \
     88     ASSERT_STATIC (_assertion); \
     89     ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
     90   }
     91 # define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
     92 # define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)
     93 
     94 /* Check that _code compiles in a method environment */
     95 #define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
     96   inline void _compiles_assertion_on_line_##_line (void) const \
     97   { _code; }
     98 # define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
     99 # define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
    100 
    101 
    102 #define DEFINE_SIZE_STATIC(size) \
    103   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
    104   static const unsigned int static_size = (size); \
    105   static const unsigned int min_size = (size); \
    106   inline unsigned int get_size (void) const { return (size); }
    107 
    108 #define DEFINE_SIZE_UNION(size, _member) \
    109   DEFINE_INSTANCE_ASSERTION (0*sizeof(this->u._member.static_size) + sizeof(this->u._member) == (size)); \
    110   static const unsigned int min_size = (size)
    111 
    112 #define DEFINE_SIZE_MIN(size) \
    113   DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
    114   static const unsigned int min_size = (size)
    115 
    116 #define DEFINE_SIZE_ARRAY(size, array) \
    117   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
    118   DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
    119   static const unsigned int min_size = (size)
    120 
    121 #define DEFINE_SIZE_ARRAY2(size, array1, array2) \
    122   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
    123   DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
    124   static const unsigned int min_size = (size)
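        /* Usage sketch (illustrative only; "MyRecord" is a hypothetical struct, not
         * part of this header):
         *
         *   struct MyRecord
         *   {
         *     USHORT format;
         *     USHORT value;
         *     public:
         *     DEFINE_SIZE_STATIC (4); // compile-time check: sizeof (MyRecord) == 4
         *   };
         *
         * A struct ending in an open array would instead use DEFINE_SIZE_ARRAY with
         * its trailing array member, which only pins down min_size. */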
    125 
    126 
    127 
    128 /*
    129  * Null objects
    130  */
    131 
    132 /* Global nul-content Null pool.  Enlarge as necessary. */
     133 /* TODO This really should be an extern HB_INTERNAL and defined somewhere... */
    134 static const void *_NullPool[(256+8) / sizeof (void *)];
    135 
    136 /* Generic nul-content Null objects. */
    137 template <typename Type>
    138 static inline const Type& Null (void) {
    139   ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
    140   return *CastP<Type> (_NullPool);
    141 }
    142 
     143 /* Specialization for arbitrary-content, arbitrary-sized Null objects. */
    144 #define DEFINE_NULL_DATA(Type, data) \
    145 static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
    146 template <> \
    147 /*static*/ inline const Type& Null<Type> (void) { \
    148   return *CastP<Type> (_Null##Type); \
     149 } /* The following line exists only so that the macro ends in a place that requires a semicolon */ \
    150 ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))
    151 
    152 /* Accessor macro. */
    153 #define Null(Type) Null<Type>()
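        /* Usage sketch: Null(Type) yields a shared, read-only, zero-filled instance,
         * used as a safe return value when an offset or index turns out to be invalid:
         *
         *   const USHORT &u = Null(USHORT); // behaves as the value 0
         */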
    154 
    155 
    156 /*
    157  * Dispatch
    158  */
    159 
    160 template <typename Context, typename Return, unsigned int MaxDebugDepth>
    161 struct hb_dispatch_context_t
    162 {
    163   static const unsigned int max_debug_depth = MaxDebugDepth;
    164   typedef Return return_t;
    165   template <typename T, typename F>
    166   inline bool may_dispatch (const T *obj, const F *format) { return true; }
    167   static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
    168 };
    169 
    170 
    171 /*
    172  * Sanitize
    173  */
    174 
    175 #ifndef HB_DEBUG_SANITIZE
    176 #define HB_DEBUG_SANITIZE (HB_DEBUG+0)
    177 #endif
    178 
    179 
    180 #define TRACE_SANITIZE(this) \
    181 	hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
    182 	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
    183 	 "");
    184 
    185 /* This limits sanitizing time on really broken fonts. */
    186 #ifndef HB_SANITIZE_MAX_EDITS
    187 #define HB_SANITIZE_MAX_EDITS 32
    188 #endif
    189 
    190 struct hb_sanitize_context_t :
    191        hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
    192 {
    193   inline hb_sanitize_context_t (void) :
    194 	debug_depth (0),
    195 	start (NULL), end (NULL),
    196 	writable (false), edit_count (0),
    197 	blob (NULL) {}
    198 
    199   inline const char *get_name (void) { return "SANITIZE"; }
    200   template <typename T, typename F>
    201   inline bool may_dispatch (const T *obj, const F *format)
    202   { return format->sanitize (this); }
    203   template <typename T>
    204   inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
    205   static return_t default_return_value (void) { return true; }
    206   static return_t no_dispatch_return_value (void) { return false; }
    207   bool stop_sublookup_iteration (const return_t r) const { return !r; }
    208 
    209   inline void init (hb_blob_t *b)
    210   {
    211     this->blob = hb_blob_reference (b);
    212     this->writable = false;
    213   }
    214 
    215   inline void start_processing (void)
    216   {
    217     this->start = hb_blob_get_data (this->blob, NULL);
    218     this->end = this->start + hb_blob_get_length (this->blob);
    219     assert (this->start <= this->end); /* Must not overflow. */
    220     this->edit_count = 0;
    221     this->debug_depth = 0;
    222 
    223     DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
    224 		     "start [%p..%p] (%lu bytes)",
    225 		     this->start, this->end,
    226 		     (unsigned long) (this->end - this->start));
    227   }
    228 
    229   inline void end_processing (void)
    230   {
    231     DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
    232 		     "end [%p..%p] %u edit requests",
    233 		     this->start, this->end, this->edit_count);
    234 
    235     hb_blob_destroy (this->blob);
    236     this->blob = NULL;
    237     this->start = this->end = NULL;
    238   }
    239 
    240   inline bool check_range (const void *base, unsigned int len) const
    241   {
    242     const char *p = (const char *) base;
    243     bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;
    244 
    245     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
    246        "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
    247        p, p + len, len,
    248        this->start, this->end,
    249        ok ? "OK" : "OUT-OF-RANGE");
    250 
    251     return likely (ok);
    252   }
    253 
    254   inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
    255   {
    256     const char *p = (const char *) base;
    257     bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    258     unsigned int array_size = record_size * len;
    259     bool ok = !overflows && this->check_range (base, array_size);
    260 
    261     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
    262        "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
    263        p, p + (record_size * len), record_size, len, (unsigned int) array_size,
    264        this->start, this->end,
    265        overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");
    266 
    267     return likely (ok);
    268   }
    269 
    270   template <typename Type>
    271   inline bool check_struct (const Type *obj) const
    272   {
    273     return likely (this->check_range (obj, obj->min_size));
    274   }
    275 
    276   inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
    277   {
    278     if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
    279       return false;
    280 
    281     const char *p = (const char *) base;
    282     this->edit_count++;
    283 
    284     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
    285        "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
    286        this->edit_count,
    287        p, p + len, len,
    288        this->start, this->end,
    289        this->writable ? "GRANTED" : "DENIED");
    290 
    291     return this->writable;
    292   }
    293 
    294   template <typename Type, typename ValueType>
    295   inline bool try_set (const Type *obj, const ValueType &v) {
    296     if (this->may_edit (obj, obj->static_size)) {
    297       const_cast<Type *> (obj)->set (v);
    298       return true;
    299     }
    300     return false;
    301   }
    302 
    303   mutable unsigned int debug_depth;
    304   const char *start, *end;
    305   bool writable;
    306   unsigned int edit_count;
    307   hb_blob_t *blob;
    308 };
    309 
    310 
    311 
    312 /* Template to sanitize an object. */
    313 template <typename Type>
    314 struct Sanitizer
    315 {
    316   static hb_blob_t *sanitize (hb_blob_t *blob) {
    317     hb_sanitize_context_t c[1];
    318     bool sane;
    319 
    320     /* TODO is_sane() stuff */
    321 
    322     c->init (blob);
    323 
    324   retry:
    325     DEBUG_MSG_FUNC (SANITIZE, c->start, "start");
    326 
    327     c->start_processing ();
    328 
    329     if (unlikely (!c->start)) {
    330       c->end_processing ();
    331       return blob;
    332     }
    333 
    334     Type *t = CastP<Type> (const_cast<char *> (c->start));
    335 
    336     sane = t->sanitize (c);
    337     if (sane) {
    338       if (c->edit_count) {
    339 	DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);
    340 
    341         /* sanitize again to ensure no toe-stepping */
    342         c->edit_count = 0;
    343 	sane = t->sanitize (c);
    344 	if (c->edit_count) {
    345 	  DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILING", c->edit_count);
    346 	  sane = false;
    347 	}
    348       }
    349     } else {
    350       unsigned int edit_count = c->edit_count;
    351       if (edit_count && !c->writable) {
    352         c->start = hb_blob_get_data_writable (blob, NULL);
    353 	c->end = c->start + hb_blob_get_length (blob);
    354 
    355 	if (c->start) {
    356 	  c->writable = true;
    357 	  /* ok, we made it writable by relocating.  try again */
    358 	  DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
    359 	  goto retry;
    360 	}
    361       }
    362     }
    363 
    364     c->end_processing ();
    365 
    366     DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    367     if (sane)
    368       return blob;
    369     else {
    370       hb_blob_destroy (blob);
    371       return hb_blob_get_empty ();
    372     }
    373   }
    374 
    375   static const Type* lock_instance (hb_blob_t *blob) {
    376     hb_blob_make_immutable (blob);
    377     const char *base = hb_blob_get_data (blob, NULL);
    378     return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
    379   }
    380 };
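        /* Usage sketch (illustrative only; "maxp" stands for any OT table type defined
         * elsewhere in HarfBuzz):
         *
         *   hb_blob_t *sane_blob = OT::Sanitizer<OT::maxp>::sanitize (raw_blob);
         *   const OT::maxp *table = OT::Sanitizer<OT::maxp>::lock_instance (sane_blob);
         *
         * sanitize () returns the (possibly edited) blob on success, or destroys it and
         * returns the empty blob on failure, so the caller always ends up with a blob
         * that is safe to read through the table's accessors. */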
    381 
    382 
    383 
    384 /*
    385  * Serialize
    386  */
    387 
    388 #ifndef HB_DEBUG_SERIALIZE
    389 #define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
    390 #endif
    391 
    392 
    393 #define TRACE_SERIALIZE(this) \
    394 	hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
    395 	(&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
    396 	 "");
    397 
    398 
    399 struct hb_serialize_context_t
    400 {
    401   inline hb_serialize_context_t (void *start_, unsigned int size)
    402   {
    403     this->start = (char *) start_;
    404     this->end = this->start + size;
    405 
    406     this->ran_out_of_room = false;
    407     this->head = this->start;
    408     this->debug_depth = 0;
    409   }
    410 
    411   template <typename Type>
    412   inline Type *start_serialize (void)
    413   {
    414     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
    415 		     "start [%p..%p] (%lu bytes)",
    416 		     this->start, this->end,
    417 		     (unsigned long) (this->end - this->start));
    418 
    419     return start_embed<Type> ();
    420   }
    421 
    422   inline void end_serialize (void)
    423   {
    424     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
    425 		     "end [%p..%p] serialized %d bytes; %s",
    426 		     this->start, this->end,
    427 		     (int) (this->head - this->start),
    428 		     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");
    429 
    430   }
    431 
    432   template <typename Type>
    433   inline Type *copy (void)
    434   {
    435     assert (!this->ran_out_of_room);
    436     unsigned int len = this->head - this->start;
    437     void *p = malloc (len);
    438     if (p)
    439       memcpy (p, this->start, len);
    440     return reinterpret_cast<Type *> (p);
    441   }
    442 
    443   template <typename Type>
    444   inline Type *allocate_size (unsigned int size)
    445   {
    446     if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
    447       this->ran_out_of_room = true;
    448       return NULL;
    449     }
    450     memset (this->head, 0, size);
    451     char *ret = this->head;
    452     this->head += size;
    453     return reinterpret_cast<Type *> (ret);
    454   }
    455 
    456   template <typename Type>
    457   inline Type *allocate_min (void)
    458   {
    459     return this->allocate_size<Type> (Type::min_size);
    460   }
    461 
    462   template <typename Type>
    463   inline Type *start_embed (void)
    464   {
    465     Type *ret = reinterpret_cast<Type *> (this->head);
    466     return ret;
    467   }
    468 
    469   template <typename Type>
    470   inline Type *embed (const Type &obj)
    471   {
    472     unsigned int size = obj.get_size ();
    473     Type *ret = this->allocate_size<Type> (size);
    474     if (unlikely (!ret)) return NULL;
    475     memcpy (ret, obj, size);
    476     return ret;
    477   }
    478 
    479   template <typename Type>
    480   inline Type *extend_min (Type &obj)
    481   {
    482     unsigned int size = obj.min_size;
    483     assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    484     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    485     return reinterpret_cast<Type *> (&obj);
    486   }
    487 
    488   template <typename Type>
    489   inline Type *extend (Type &obj)
    490   {
    491     unsigned int size = obj.get_size ();
    492     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    493     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    494     return reinterpret_cast<Type *> (&obj);
    495   }
    496 
    497   inline void truncate (void *new_head)
    498   {
    499     assert (this->start < new_head && new_head <= this->head);
    500     this->head = (char *) new_head;
    501   }
    502 
    503   unsigned int debug_depth;
    504   char *start, *end, *head;
    505   bool ran_out_of_room;
    506 };
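        /* Usage sketch (illustrative only; "buf", "BUF_SIZE" and "MyArray" are
         * hypothetical names):
         *
         *   char buf[BUF_SIZE];
         *   hb_serialize_context_t c (buf, sizeof (buf));
         *   MyArray *a = c.start_serialize<MyArray> ();
         *   ... a->serialize (&c, ...) appends data at c.head ...
         *   c.end_serialize ();
         *
         * allocate_size () zero-fills the bytes it reserves and sets ran_out_of_room
         * instead of overrunning the buffer, so callers only need to check for NULL. */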
    507 
    508 template <typename Type>
    509 struct Supplier
    510 {
    511   inline Supplier (const Type *array, unsigned int len_)
    512   {
    513     head = array;
    514     len = len_;
    515   }
    516   inline const Type operator [] (unsigned int i) const
    517   {
    518     if (unlikely (i >= len)) return Type ();
    519     return head[i];
    520   }
    521 
    522   inline void advance (unsigned int count)
    523   {
    524     if (unlikely (count > len))
    525       count = len;
    526     len -= count;
    527     head += count;
    528   }
    529 
    530   private:
    531   inline Supplier (const Supplier<Type> &); /* Disallow copy */
    532   inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */
    533 
    534   unsigned int len;
    535   const Type *head;
    536 };
    537 
    538 
    539 
    540 
    541 /*
    542  *
    543  * The OpenType Font File: Data Types
    544  */
    545 
    546 
    547 /* "The following data types are used in the OpenType font file.
    548  *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
    549 
    550 /*
    551  * Int types
    552  */
    553 
    554 
    555 template <typename Type, int Bytes> struct BEInt;
    556 
    557 template <typename Type>
    558 struct BEInt<Type, 1>
    559 {
    560   public:
    561   inline void set (Type V)
    562   {
    563     v = V;
    564   }
    565   inline operator Type (void) const
    566   {
    567     return v;
    568   }
    569   private: uint8_t v;
    570 };
    571 template <typename Type>
    572 struct BEInt<Type, 2>
    573 {
    574   public:
    575   inline void set (Type V)
    576   {
    577     v[0] = (V >>  8) & 0xFF;
    578     v[1] = (V      ) & 0xFF;
    579   }
    580   inline operator Type (void) const
    581   {
    582     return (v[0] <<  8)
    583          + (v[1]      );
    584   }
    585   private: uint8_t v[2];
    586 };
    587 template <typename Type>
    588 struct BEInt<Type, 3>
    589 {
    590   public:
    591   inline void set (Type V)
    592   {
    593     v[0] = (V >> 16) & 0xFF;
    594     v[1] = (V >>  8) & 0xFF;
    595     v[2] = (V      ) & 0xFF;
    596   }
    597   inline operator Type (void) const
    598   {
    599     return (v[0] << 16)
    600          + (v[1] <<  8)
    601          + (v[2]      );
    602   }
    603   private: uint8_t v[3];
    604 };
    605 template <typename Type>
    606 struct BEInt<Type, 4>
    607 {
    608   public:
    609   inline void set (Type V)
    610   {
    611     v[0] = (V >> 24) & 0xFF;
    612     v[1] = (V >> 16) & 0xFF;
    613     v[2] = (V >>  8) & 0xFF;
    614     v[3] = (V      ) & 0xFF;
    615   }
    616   inline operator Type (void) const
    617   {
    618     return (v[0] << 24)
    619          + (v[1] << 16)
    620          + (v[2] <<  8)
    621          + (v[3]      );
    622   }
    623   private: uint8_t v[4];
    624 };
    625 
     626 /* Integer types in big-endian order, with no alignment requirement */
    627 template <typename Type, unsigned int Size>
    628 struct IntType
    629 {
    630   inline void set (Type i) { v.set (i); }
    631   inline operator Type(void) const { return v; }
    632   inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
    633   inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
    634   static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
    635   inline int cmp (Type a) const
    636   {
    637     Type b = v;
    638     if (sizeof (Type) < sizeof (int))
    639       return (int) a - (int) b;
    640     else
    641       return a < b ? -1 : a == b ? 0 : +1;
    642   }
    643   inline bool sanitize (hb_sanitize_context_t *c) const
    644   {
    645     TRACE_SANITIZE (this);
    646     return_trace (likely (c->check_struct (this)));
    647   }
    648   protected:
    649   BEInt<Type, Size> v;
    650   public:
    651   DEFINE_SIZE_STATIC (Size);
    652 };
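        /* For example, a USHORT (IntType<uint16_t, 2>, typedef'ed below) holding 0x1234u
         * is stored as the byte sequence { 0x12, 0x34 } regardless of host endianness;
         * reading it back reassembles the native value:
         *
         *   USHORT u;
         *   u.set (0x1234u); // bytes in memory: 0x12, 0x34
         *   uint16_t n = u;  // n == 0x1234 on any host
         */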
    653 
    654 typedef	IntType<int8_t	, 1> CHAR;	/* 8-bit signed integer. */
    655 typedef	IntType<uint8_t	, 1> BYTE;	/* 8-bit unsigned integer. */
    656 typedef	IntType<int8_t	, 1> INT8;	/* 8-bit signed integer. */
    657 typedef IntType<uint16_t, 2> USHORT;	/* 16-bit unsigned integer. */
    658 typedef IntType<int16_t,  2> SHORT;	/* 16-bit signed integer. */
    659 typedef IntType<uint32_t, 4> ULONG;	/* 32-bit unsigned integer. */
    660 typedef IntType<int32_t,  4> LONG;	/* 32-bit signed integer. */
    661 typedef IntType<uint32_t, 3> UINT24;	/* 24-bit unsigned integer. */
    662 
    663 /* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
    664 typedef SHORT FWORD;
    665 
    666 /* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
    667 typedef USHORT UFWORD;
    668 
    669 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
    670 struct F2DOT14 : SHORT
    671 {
    672   //inline float to_float (void) const { return ???; }
    673   //inline void set_float (float f) { v.set (f * ???); }
    674   public:
    675   DEFINE_SIZE_STATIC (2);
    676 };
    677 
    678 /* 32-bit signed fixed-point number (16.16). */
    679 struct Fixed: LONG
    680 {
    681   //inline float to_float (void) const { return ???; }
    682   //inline void set_float (float f) { v.set (f * ???); }
    683   public:
    684   DEFINE_SIZE_STATIC (4);
    685 };
    686 
    687 /* Date represented in number of seconds since 12:00 midnight, January 1,
    688  * 1904. The value is represented as a signed 64-bit integer. */
    689 struct LONGDATETIME
    690 {
    691   inline bool sanitize (hb_sanitize_context_t *c) const
    692   {
    693     TRACE_SANITIZE (this);
    694     return_trace (likely (c->check_struct (this)));
    695   }
    696   protected:
    697   LONG major;
    698   ULONG minor;
    699   public:
    700   DEFINE_SIZE_STATIC (8);
    701 };
    702 
    703 /* Array of four uint8s (length = 32 bits) used to identify a script, language
    704  * system, feature, or baseline */
    705 struct Tag : ULONG
    706 {
    707   /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
    708   inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
    709   inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
    710   public:
    711   DEFINE_SIZE_STATIC (4);
    712 };
    713 DEFINE_NULL_DATA (Tag, "    ");
    714 
    715 /* Glyph index number, same as uint16 (length = 16 bits) */
    716 struct GlyphID : USHORT {
    717   static inline int cmp (const GlyphID *a, const GlyphID *b) { return b->USHORT::cmp (*a); }
    718   inline int cmp (hb_codepoint_t a) const { return (int) a - (int) *this; }
    719 };
    720 
    721 /* Script/language-system/feature index */
    722 struct Index : USHORT {
    723   static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
    724 };
    725 DEFINE_NULL_DATA (Index, "\xff\xff");
    726 
    727 /* Offset, Null offset = 0 */
    728 template <typename Type=USHORT>
    729 struct Offset : Type
    730 {
    731   inline bool is_null (void) const { return 0 == *this; }
    732   public:
    733   DEFINE_SIZE_STATIC (sizeof(Type));
    734 };
    735 
    736 
    737 /* CheckSum */
    738 struct CheckSum : ULONG
    739 {
    740   /* This is the reference implementation from the spec. */
    741   static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
    742   {
    743     uint32_t Sum = 0L;
    744     const ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;
    745 
    746     while (Table < EndPtr)
    747       Sum += *Table++;
    748     return Sum;
    749   }
    750 
    751   /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
    752   inline void set_for_data (const void *data, unsigned int length)
    753   { set (CalcTableChecksum ((const ULONG *) data, length)); }
    754 
    755   public:
    756   DEFINE_SIZE_STATIC (4);
    757 };
    758 
    759 
    760 /*
    761  * Version Numbers
    762  */
    763 
    764 template <typename FixedType=USHORT>
    765 struct FixedVersion
    766 {
    767   inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }
    768 
    769   inline bool sanitize (hb_sanitize_context_t *c) const
    770   {
    771     TRACE_SANITIZE (this);
    772     return_trace (c->check_struct (this));
    773   }
    774 
    775   FixedType major;
    776   FixedType minor;
    777   public:
    778   DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
    779 };
    780 
    781 
    782 
    783 /*
    784  * Template subclasses of Offset that do the dereferencing.
    785  * Use: (base+offset)
    786  */
    787 
    788 template <typename Type, typename OffsetType=USHORT>
    789 struct OffsetTo : Offset<OffsetType>
    790 {
    791   inline const Type& operator () (const void *base) const
    792   {
    793     unsigned int offset = *this;
    794     if (unlikely (!offset)) return Null(Type);
    795     return StructAtOffset<Type> (base, offset);
    796   }
    797 
    798   inline Type& serialize (hb_serialize_context_t *c, const void *base)
    799   {
    800     Type *t = c->start_embed<Type> ();
    801     this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    802     return *t;
    803   }
    804 
    805   inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
    806   {
    807     TRACE_SANITIZE (this);
    808     if (unlikely (!c->check_struct (this))) return_trace (false);
    809     unsigned int offset = *this;
    810     if (unlikely (!offset)) return_trace (true);
    811     if (unlikely (!c->check_range (base, offset))) return_trace (false);
    812     const Type &obj = StructAtOffset<Type> (base, offset);
    813     return_trace (likely (obj.sanitize (c)) || neuter (c));
    814   }
    815   template <typename T>
    816   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
    817   {
    818     TRACE_SANITIZE (this);
    819     if (unlikely (!c->check_struct (this))) return_trace (false);
    820     unsigned int offset = *this;
    821     if (unlikely (!offset)) return_trace (true);
    822     if (unlikely (!c->check_range (base, offset))) return_trace (false);
    823     const Type &obj = StructAtOffset<Type> (base, offset);
    824     return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
    825   }
    826 
    827   /* Set the offset to Null */
    828   inline bool neuter (hb_sanitize_context_t *c) const {
    829     return c->try_set (this, 0);
    830   }
    831   DEFINE_SIZE_STATIC (sizeof(OffsetType));
    832 };
    833 template <typename Type> struct LOffsetTo : OffsetTo<Type, ULONG> {};
    834 template <typename Base, typename OffsetType, typename Type>
    835 static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
    836 template <typename Base, typename OffsetType, typename Type>
    837 static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
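        /* Usage sketch (illustrative only): for a struct that stores a member
         * "OffsetTo<Coverage> coverage" (Coverage lives in the layout headers), the
         * offset is resolved relative to the enclosing struct with operator+:
         *
         *   const Coverage &cov = this+coverage; // Null(Coverage) if the offset is 0
         *
         * During sanitization, neuter () zeroes out an offset whose target fails to
         * sanitize, provided the blob has been made writable. */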
    838 
    839 
    840 /*
    841  * Array Types
    842  */
    843 
     844 /* An array preceded by a count of its elements. */
    845 template <typename Type, typename LenType=USHORT>
    846 struct ArrayOf
    847 {
    848   const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
    849   {
    850     unsigned int count = len;
    851     if (unlikely (start_offset > count))
    852       count = 0;
    853     else
    854       count -= start_offset;
    855     count = MIN (count, *pcount);
    856     *pcount = count;
    857     return array + start_offset;
    858   }
    859 
    860   inline const Type& operator [] (unsigned int i) const
    861   {
    862     if (unlikely (i >= len)) return Null(Type);
    863     return array[i];
    864   }
    865   inline Type& operator [] (unsigned int i)
    866   {
    867     return array[i];
    868   }
    869   inline unsigned int get_size (void) const
    870   { return len.static_size + len * Type::static_size; }
    871 
    872   inline bool serialize (hb_serialize_context_t *c,
    873 			 unsigned int items_len)
    874   {
    875     TRACE_SERIALIZE (this);
    876     if (unlikely (!c->extend_min (*this))) return_trace (false);
    877     len.set (items_len); /* TODO(serialize) Overflow? */
    878     if (unlikely (!c->extend (*this))) return_trace (false);
    879     return_trace (true);
    880   }
    881 
    882   inline bool serialize (hb_serialize_context_t *c,
    883 			 Supplier<Type> &items,
    884 			 unsigned int items_len)
    885   {
    886     TRACE_SERIALIZE (this);
    887     if (unlikely (!serialize (c, items_len))) return_trace (false);
    888     for (unsigned int i = 0; i < items_len; i++)
    889       array[i] = items[i];
    890     items.advance (items_len);
    891     return_trace (true);
    892   }
    893 
    894   inline bool sanitize (hb_sanitize_context_t *c) const
    895   {
    896     TRACE_SANITIZE (this);
    897     if (unlikely (!sanitize_shallow (c))) return_trace (false);
    898 
    899     /* Note: for structs that do not reference other structs,
    900      * we do not need to call their sanitize() as we already did
    901      * a bounds check on the aggregate array size.  We just include
    902      * a small unreachable expression to make sure the structs
    903      * pointed to do have a simple sanitize(), i.e. they do not
    904      * reference other structs via offsets.
    905      */
    906     (void) (false && array[0].sanitize (c));
    907 
    908     return_trace (true);
    909   }
    910   inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
    911   {
    912     TRACE_SANITIZE (this);
    913     if (unlikely (!sanitize_shallow (c))) return_trace (false);
    914     unsigned int count = len;
    915     for (unsigned int i = 0; i < count; i++)
    916       if (unlikely (!array[i].sanitize (c, base)))
    917         return_trace (false);
    918     return_trace (true);
    919   }
    920   template <typename T>
    921   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
    922   {
    923     TRACE_SANITIZE (this);
    924     if (unlikely (!sanitize_shallow (c))) return_trace (false);
    925     unsigned int count = len;
    926     for (unsigned int i = 0; i < count; i++)
    927       if (unlikely (!array[i].sanitize (c, base, user_data)))
    928         return_trace (false);
    929     return_trace (true);
    930   }
    931 
    932   template <typename SearchType>
    933   inline int lsearch (const SearchType &x) const
    934   {
    935     unsigned int count = len;
    936     for (unsigned int i = 0; i < count; i++)
    937       if (!this->array[i].cmp (x))
    938         return i;
    939     return -1;
    940   }
    941 
    942   private:
    943   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
    944   {
    945     TRACE_SANITIZE (this);
    946     return_trace (c->check_struct (this) && c->check_array (array, Type::static_size, len));
    947   }
    948 
    949   public:
    950   LenType len;
    951   Type array[VAR];
    952   public:
    953   DEFINE_SIZE_ARRAY (sizeof (LenType), array);
    954 };
    955 template <typename Type> struct LArrayOf : ArrayOf<Type, ULONG> {};
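        /* Usage sketch (illustrative only; "arr" and "process" are hypothetical names):
         *
         *   unsigned int count = arr.len;   // element count stored before the data
         *   for (unsigned int i = 0; i < count; i++)
         *     process (arr[i]);             // out-of-range reads return Null(Type)
         */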
    956 
     957 /* Array of Offsets */
    958 template <typename Type, typename OffsetType=USHORT>
    959 struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
    960 
    961 /* Array of offsets relative to the beginning of the array itself. */
    962 template <typename Type>
    963 struct OffsetListOf : OffsetArrayOf<Type>
    964 {
    965   inline const Type& operator [] (unsigned int i) const
    966   {
    967     if (unlikely (i >= this->len)) return Null(Type);
    968     return this+this->array[i];
    969   }
    970 
    971   inline bool sanitize (hb_sanitize_context_t *c) const
    972   {
    973     TRACE_SANITIZE (this);
    974     return_trace (OffsetArrayOf<Type>::sanitize (c, this));
    975   }
    976   template <typename T>
    977   inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
    978   {
    979     TRACE_SANITIZE (this);
    980     return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
    981   }
    982 };
    983 
    984 
     985 /* An array whose first element is not stored; the data starts at the second element. */
    986 template <typename Type, typename LenType=USHORT>
    987 struct HeadlessArrayOf
    988 {
    989   inline const Type& operator [] (unsigned int i) const
    990   {
    991     if (unlikely (i >= len || !i)) return Null(Type);
    992     return array[i-1];
    993   }
    994   inline unsigned int get_size (void) const
    995   { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }
    996 
    997   inline bool serialize (hb_serialize_context_t *c,
    998 			 Supplier<Type> &items,
    999 			 unsigned int items_len)
   1000   {
   1001     TRACE_SERIALIZE (this);
   1002     if (unlikely (!c->extend_min (*this))) return_trace (false);
   1003     len.set (items_len); /* TODO(serialize) Overflow? */
   1004     if (unlikely (!items_len)) return_trace (true);
   1005     if (unlikely (!c->extend (*this))) return_trace (false);
   1006     for (unsigned int i = 0; i < items_len - 1; i++)
   1007       array[i] = items[i];
   1008     items.advance (items_len - 1);
   1009     return_trace (true);
   1010   }
   1011 
   1012   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
   1013   {
   1014     return c->check_struct (this)
   1015 	&& c->check_array (this, Type::static_size, len);
   1016   }
   1017 
   1018   inline bool sanitize (hb_sanitize_context_t *c) const
   1019   {
   1020     TRACE_SANITIZE (this);
   1021     if (unlikely (!sanitize_shallow (c))) return_trace (false);
   1022 
   1023     /* Note: for structs that do not reference other structs,
   1024      * we do not need to call their sanitize() as we already did
   1025      * a bounds check on the aggregate array size.  We just include
   1026      * a small unreachable expression to make sure the structs
   1027      * pointed to do have a simple sanitize(), i.e. they do not
   1028      * reference other structs via offsets.
   1029      */
   1030     (void) (false && array[0].sanitize (c));
   1031 
   1032     return_trace (true);
   1033   }
   1034 
   1035   LenType len;
   1036   Type array[VAR];
   1037   public:
   1038   DEFINE_SIZE_ARRAY (sizeof (LenType), array);
   1039 };
   1040 
   1041 
   1042 /* An array with sorted elements.  Supports binary searching. */
   1043 template <typename Type, typename LenType=USHORT>
   1044 struct SortedArrayOf : ArrayOf<Type, LenType>
   1045 {
   1046   template <typename SearchType>
   1047   inline int bsearch (const SearchType &x) const
   1048   {
   1049     /* Hand-coded bsearch here since this is in the hot inner loop. */
   1050     int min = 0, max = (int) this->len - 1;
   1051     while (min <= max)
   1052     {
   1053       int mid = (min + max) / 2;
   1054       int c = this->array[mid].cmp (x);
   1055       if (c < 0)
   1056         max = mid - 1;
   1057       else if (c > 0)
   1058         min = mid + 1;
   1059       else
   1060         return mid;
   1061     }
   1062     return -1;
   1063   }
   1064 };
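        /* Usage sketch (illustrative only; "glyphs" is a hypothetical
         * SortedArrayOf<GlyphID>):
         *
         *   int i = glyphs.bsearch (gid); // gid is an hb_codepoint_t; -1 if not found
         *
         * This works because GlyphID::cmp (hb_codepoint_t) provides the ordering that
         * the binary search above expects. */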
   1065 
   1066 
   1067 /* Lazy struct and blob loaders. */
   1068 
   1069 /* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
   1070 template <typename T>
   1071 struct hb_lazy_loader_t
   1072 {
   1073   inline void init (hb_face_t *face_)
   1074   {
   1075     face = face_;
   1076     instance = NULL;
   1077   }
   1078 
   1079   inline void fini (void)
   1080   {
   1081     if (instance && instance != &OT::Null(T))
   1082     {
   1083       instance->fini();
   1084       free (instance);
   1085     }
   1086   }
   1087 
   1088   inline const T* get (void) const
   1089   {
   1090   retry:
   1091     T *p = (T *) hb_atomic_ptr_get (&instance);
   1092     if (unlikely (!p))
   1093     {
   1094       p = (T *) calloc (1, sizeof (T));
   1095       if (unlikely (!p))
   1096         p = const_cast<T *> (&OT::Null(T));
   1097       else
   1098 	p->init (face);
   1099       if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), NULL, p)))
   1100       {
   1101 	if (p != &OT::Null(T))
   1102 	  p->fini ();
   1103 	goto retry;
   1104       }
   1105     }
   1106     return p;
   1107   }
   1108 
   1109   inline const T* operator-> (void) const
   1110   {
   1111     return get ();
   1112   }
   1113 
   1114   private:
   1115   hb_face_t *face;
   1116   T *instance;
   1117 };
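        /* Usage sketch (illustrative only; "my_data_t" is a hypothetical type that
         * provides init (hb_face_t *) and fini ()):
         *
         *   hb_lazy_loader_t<my_data_t> loader;
         *   loader.init (face);
         *   const my_data_t *data = loader.get (); // allocated on first use
         *   ...
         *   loader.fini ();
         *
         * get () publishes the instance with an atomic compare-and-exchange, so
         * concurrent first calls are safe: a losing thread finalizes its own copy
         * and retries. */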
   1118 
   1119 /* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
   1120 template <typename T>
   1121 struct hb_lazy_table_loader_t
   1122 {
   1123   inline void init (hb_face_t *face_)
   1124   {
   1125     face = face_;
   1126     instance = NULL;
   1127     blob = NULL;
   1128   }
   1129 
   1130   inline void fini (void)
   1131   {
   1132     hb_blob_destroy (blob);
   1133   }
   1134 
   1135   inline const T* get (void) const
   1136   {
   1137   retry:
   1138     T *p = (T *) hb_atomic_ptr_get (&instance);
   1139     if (unlikely (!p))
   1140     {
   1141       hb_blob_t *blob_ = OT::Sanitizer<T>::sanitize (face->reference_table (T::tableTag));
   1142       p = const_cast<T *>(OT::Sanitizer<T>::lock_instance (blob_));
   1143       if (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), NULL, p))
   1144       {
   1145 	hb_blob_destroy (blob_);
   1146 	goto retry;
   1147       }
   1148       blob = blob_;
   1149     }
   1150     return p;
   1151   }
   1152 
   1153   inline const T* operator-> (void) const
   1154   {
   1155     return get();
   1156   }
   1157 
   1158   private:
   1159   hb_face_t *face;
   1160   T *instance;
   1161   mutable hb_blob_t *blob;
   1162 };
   1163 
   1164 
   1165 } /* namespace OT */
   1166 
   1167 
   1168 #endif /* HB_OPEN_TYPE_PRIVATE_HH */
   1169