/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_PRIVATE_HH
#define HB_OPEN_TYPE_PRIVATE_HH

#include "hb-private.hh"


namespace OT {


/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
static inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
static inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
static inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Also works with X of variable size.  X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
static inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }

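/* Example (illustrative only, not part of the upstream header): given a
 * pointer into raw, big-endian table data, these helpers are typically
 * chained to walk variable-sized structures without copying.  Assuming a
 * hypothetical struct Record defined in the style of this file, with a
 * working get_size():
 *
 *   const Record &first  = StructAtOffset<Record> (table_data, record_offset);
 *   const Record &second = StructAfter<Record> (first);
 *
 * Both references point straight into the mapped font data; no allocation
 * or byte-swapping happens until a field is actually read.
 */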


/*
 * Size checking
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size)

/* Size signifying variable-sized array */
#define VAR 1

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)

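/* Example (illustrative only): a fixed-size struct declares its layout with
 * DEFINE_SIZE_STATIC, and a length-prefixed one with DEFINE_SIZE_ARRAY.
 * A minimal sketch, using the hypothetical name MyFixed:
 *
 *   struct MyFixed
 *   {
 *     USHORT a;
 *     USHORT b;
 *     public:
 *     DEFINE_SIZE_STATIC (4);
 *   };
 *
 * The generated assertion methods are never called; they exist only so
 * that the declared size is checked against sizeof(*this) at compile time.
 */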


/*
 * Null objects
 */

/* Global nul-content Null pool.  Enlarge as necessary. */
/* TODO This really should be an extern HB_INTERNAL and defined somewhere... */
static const void *_NullPool[(256+8) / sizeof (void *)];

/* Generic nul-content Null objects. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()

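/* Example (illustrative only): out-of-range accesses throughout this file
 * resolve to Null(Type), a dummy object backed by the zero-filled pool (or by
 * the custom data given to DEFINE_NULL_DATA), so lookups can be chained
 * without per-step error checks.  For a hypothetical `tags' array:
 *
 *   const Tag &t = tags[i];           (Null(Tag) if i is out of range)
 *   if (&t == &Null(Tag)) ...         (the miss can still be detected)
 */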


/*
 * Sanitize
 */

#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif


#define TRACE_SANITIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 100
#endif

struct hb_sanitize_context_t
{
  inline const char *get_name (void) { return "SANITIZE"; }
  static const unsigned int max_debug_depth = HB_DEBUG_SANITIZE;
  typedef bool return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  bool stop_sublookup_iteration (const return_t r HB_UNUSED) const { return false; }

  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, NULL);
    this->end = this->start + hb_blob_get_length (this->blob);
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, this->blob, 0, +1,
		     "start [%p..%p] (%lu bytes)",
		     this->start, this->end,
		     (unsigned long) (this->end - this->start));
  }

  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->blob, 0, -1,
		     "end [%p..%p] %u edit requests",
		     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = NULL;
    this->start = this->end = NULL;
  }

  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;

    hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace
      (&this->debug_depth, "SANITIZE", this->blob, NULL,
       "check_range [%p..%p] (%d bytes) in [%p..%p]",
       p, p + len, len,
       this->start, this->end);

    return TRACE_RETURN (likely (this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len));
  }

  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);

    hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace
      (&this->debug_depth, "SANITIZE", this->blob, NULL,
       "check_array [%p..%p] (%d*%d=%ld bytes) in [%p..%p]",
       p, p + (record_size * len), record_size, len, (unsigned long) record_size * len,
       this->start, this->end);

    return TRACE_RETURN (likely (!overflows && this->check_range (base, record_size * len)));
  }

  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace
      (&this->debug_depth, "SANITIZE", this->blob, NULL,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return TRACE_RETURN (this->writable);
  }

  template <typename Type, typename ValueType>
  inline bool try_set (Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      obj->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth;
  const char *start, *end;
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
};

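/* Example (illustrative only): a table type participates in sanitization by
 * providing a sanitize() method that bounds-checks itself and anything it
 * points to.  A minimal sketch, with the hypothetical names MyTable and
 * SomeChild standing in for real table types:
 *
 *   struct MyTable
 *   {
 *     inline bool sanitize (hb_sanitize_context_t *c) {
 *       TRACE_SANITIZE (this);
 *       return TRACE_RETURN (c->check_struct (this) &&
 *                            child.sanitize (c, this));
 *     }
 *     OffsetTo<SomeChild> child;
 *     public:
 *     DEFINE_SIZE_STATIC (2);
 *   };
 */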


/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  static hb_blob_t *sanitize (hb_blob_t *blob) {
    hb_sanitize_context_t c[1] = {{0, NULL, NULL, false, 0, NULL}};
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, blob, "start");

    c->start_processing ();

    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      if (c->edit_count) {
	DEBUG_MSG_FUNC (SANITIZE, blob, "passed first round with %d edits; going for second round", c->edit_count);

	/* sanitize again to ensure no toe-stepping */
	c->edit_count = 0;
	sane = t->sanitize (c);
	if (c->edit_count) {
	  DEBUG_MSG_FUNC (SANITIZE, blob, "requested %d edits in second round; FAILING", c->edit_count);
	  sane = false;
	}
      }
    } else {
      unsigned int edit_count = c->edit_count;
      if (edit_count && !c->writable) {
	c->start = hb_blob_get_data_writable (blob, NULL);
	c->end = c->start + hb_blob_get_length (blob);

	if (c->start) {
	  c->writable = true;
	  /* ok, we made it writable by relocating.  try again */
	  DEBUG_MSG_FUNC (SANITIZE, blob, "retry");
	  goto retry;
	}
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, blob, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, NULL);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }
};

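/* Example (illustrative only): callers hand a freshly created blob to the
 * sanitizer and keep whatever comes back; on failure they get the empty blob
 * rather than NULL.  A sketch, assuming a table type `maxp' defined elsewhere
 * in HarfBuzz:
 *
 *   hb_blob_t *raw  = hb_blob_create (data, length,
 *                                     HB_MEMORY_MODE_READONLY, NULL, NULL);
 *   hb_blob_t *safe = Sanitizer<maxp>::sanitize (raw);
 *   const maxp *table = Sanitizer<maxp>::lock_instance (safe);
 */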


/*
 * Serialize
 */

#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif


#define TRACE_SERIALIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
	(&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
	 "");


struct hb_serialize_context_t
{
  inline hb_serialize_context_t (void *start, unsigned int size)
  {
    this->start = (char *) start;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }

  template <typename Type>
  inline Type *start_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
		     "start [%p..%p] (%lu bytes)",
		     this->start, this->end,
		     (unsigned long) (this->end - this->start));

    return start_embed<Type> ();
  }

  inline void end_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
		     "end [%p..%p] serialized %d bytes; %s",
		     this->start, this->end,
		     (int) (this->head - this->start),
		     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");

  }

  template <typename Type>
  inline Type *copy (void)
  {
    assert (!this->ran_out_of_room);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }

  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return NULL;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }

  template <typename Type>
  inline Type *embed (const Type &obj)
  {
    unsigned int size = obj.get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return NULL;
    memcpy (ret, obj, size);
    return ret;
  }

  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  template <typename Type>
  inline Type *extend (Type &obj)
  {
    unsigned int size = obj.get_size ();
    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  inline void truncate (void *head)
  {
    assert (this->start < head && head <= this->head);
    this->head = (char *) head;
  }

  unsigned int debug_depth;
  char *start, *end, *head;
  bool ran_out_of_room;
};

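/* Example (illustrative only): serialization writes into a caller-provided
 * buffer; the context tracks the write head and flags overflow instead of
 * writing past the end.  A minimal sketch, with MyTable standing in for a
 * table type that provides a serialize() method in the style of this file:
 *
 *   char buffer[4096];
 *   hb_serialize_context_t c (buffer, sizeof (buffer));
 *   MyTable *t = c.start_serialize<MyTable> ();
 *   bool ok = t && t->serialize (&c, ...);
 *   c.end_serialize ();
 *   MyTable *copy = ok ? c.copy<MyTable> () : NULL;  (heap copy; caller frees)
 */
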
template <typename Type>
struct Supplier
{
  inline Supplier (const Type *array, unsigned int len_)
  {
    head = array;
    len = len_;
  }
  inline const Type operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Type ();
    return head[i];
  }

  inline void advance (unsigned int count)
  {
    if (unlikely (count > len))
      count = len;
    len -= count;
    head += count;
  }

  private:
  inline Supplier (const Supplier<Type> &); /* Disallow copy */
  inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */

  unsigned int len;
  const Type *head;
};




/*
 *
 * The OpenType Font File: Data Types
 */


/* "The following data types are used in the OpenType font file.
 *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */


template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type i) { hb_be_uint16_put (v,i); }
  inline operator Type (void) const { return hb_be_uint16_get (v); }
  inline bool operator == (const BEInt<Type, 2>& o) const { return hb_be_uint16_eq (v, o.v); }
  inline bool operator != (const BEInt<Type, 2>& o) const { return !(*this == o); }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type i) { hb_be_uint32_put (v,i); }
  inline operator Type (void) const { return hb_be_uint32_get (v); }
  inline bool operator == (const BEInt<Type, 4>& o) const { return hb_be_uint32_eq (v, o.v); }
  inline bool operator != (const BEInt<Type, 4>& o) const { return !(*this == o); }
  private: uint8_t v[4];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type i) { hb_be_uint24_put (v,i); }
  inline operator Type (void) const { return hb_be_uint24_get (v); }
  inline bool operator == (const BEInt<Type, 3>& o) const { return hb_be_uint24_eq (v, o.v); }
  inline bool operator != (const BEInt<Type, 3>& o) const { return !(*this == o); }
  private: uint8_t v[3];
};

/* Integer types in big-endian order, with no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return v == o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return v != o.v; }
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  inline int cmp (IntType<Type,Size> va) const { Type a = va; Type b = v; return a < b ? -1 : a == b ? 0 : +1; }
  inline int cmp (Type a) const { Type b = v; return a < b ? -1 : a == b ? 0 : +1; }
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef		uint8_t	     BYTE;	/* 8-bit unsigned integer. */
typedef IntType<uint16_t, 2> USHORT;	/* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;	/* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;	/* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;	/* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;	/* 24-bit unsigned integer. */

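/* Example (illustrative only): these types overlay the raw font bytes, so a
 * big-endian 16-bit field is read simply by converting to its native type:
 *
 *   const USHORT &num_glyphs = StructAtOffset<USHORT> (maxp_data, 4);
 *   unsigned int n = num_glyphs;    (byte swap happens in the conversion)
 *
 * Here `maxp_data' is just an assumed pointer to the start of a maxp table.
 */
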
/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;

/* Date represented as the number of seconds since 12:00 midnight, January 1,
 * 1904.  The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (likely (c->check_struct (this)));
  }
  protected:
  LONG major;
  ULONG minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
DEFINE_NULL_DATA (Tag, "    ");

/* Glyph index number, same as uint16 (length = 16 bits) */
typedef USHORT GlyphID;

/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
DEFINE_NULL_DATA (Index, "\xff\xff");

/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};


/* CheckSum */
struct CheckSum : ULONG
{
  /* This is the reference implementation from the spec. */
  static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    const ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const ULONG *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};


/*
 * Version Numbers
 */

struct FixedVersion
{
  inline uint32_t to_int (void) const { return (major << 16) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  USHORT major;
  USHORT minor;
  public:
  DEFINE_SIZE_STATIC (4);
};



/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

template <typename Type, typename OffsetType=USHORT>
struct OffsetTo : Offset<OffsetType>
{
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  inline Type& serialize (hb_serialize_context_t *c, void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return TRACE_RETURN (true);
    Type &obj = StructAtOffset<Type> (base, offset);
    return TRACE_RETURN (likely (obj.sanitize (c)) || neuter (c));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, void *base, T user_data) {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return TRACE_RETURN (true);
    Type &obj = StructAtOffset<Type> (base, offset);
    return TRACE_RETURN (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }

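/* Example (illustrative only): with the operator+ overloads above, following
 * an offset reads naturally as (base+offset).  Assuming a struct whose
 * `coverage' member is an OffsetTo<Coverage> relative to the struct itself,
 * a member function can write:
 *
 *   const Coverage &cov = this+coverage;
 *
 * A null offset resolves to Null(Coverage) rather than dereferencing NULL.
 */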

/*
 * Array Types
 */

/* An array with a number of elements. */
template <typename Type, typename LenType=USHORT>
struct ArrayOf
{
  const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
  {
    unsigned int count = len;
    if (unlikely (start_offset > count))
      count = 0;
    else
      count -= start_offset;
    count = MIN (count, *pcount);
    *pcount = count;
    return array + start_offset;
  }

  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Null(Type);
    return array[i];
  }
  inline Type& operator [] (unsigned int i)
  {
    return array[i];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + len * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!serialize (c, items_len))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < items_len; i++)
      array[i] = items[i];
    items.advance (items_len);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bounds check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return TRACE_RETURN (true);
  }
  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base)))
        return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, void *base, T user_data) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base, user_data)))
        return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  template <typename SearchType>
  inline int lsearch (const SearchType &x) const
  {
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (!this->array[i].cmp (x))
        return i;
    return -1;
  }

  private:
  inline bool sanitize_shallow (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && c->check_array (this, Type::static_size, len));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};

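/* Example (illustrative only): serializing an ArrayOf from a plain C array
 * goes through a Supplier, which feeds the items one by one and guards
 * against over-reads.  A sketch, assuming an ArrayOf<USHORT> member with the
 * hypothetical name `values':
 *
 *   USHORT items[3];
 *   ... fill items ...
 *   Supplier<USHORT> supplier (items, 3);
 *   if (!this->values.serialize (c, supplier, 3)) return TRACE_RETURN (false);
 */
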
/* Array of Offsets */
template <typename Type>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type> > {};

/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};


/* An array starting at the second element. */
template <typename Type, typename LenType=USHORT>
struct HeadlessArrayOf
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return TRACE_RETURN (true);
    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return TRACE_RETURN (true);
  }

  inline bool sanitize_shallow (hb_sanitize_context_t *c) {
    return c->check_struct (this)
	&& c->check_array (this, Type::static_size, len);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bounds check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return TRACE_RETURN (true);
  }

  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};


/* An array with sorted elements.  Supports binary searching. */
template <typename Type, typename LenType=USHORT>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
  template <typename SearchType>
  inline int bsearch (const SearchType &x) const
  {
    /* Hand-coded bsearch here since this is in the hot inner loop. */
    int min = 0, max = (int) this->len - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      int c = this->array[mid].cmp (x);
      if (c < 0)
        max = mid - 1;
      else if (c > 0)
        min = mid + 1;
      else
        return mid;
    }
    return -1;
  }
};

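/* Example (illustrative only): lookup tables that the spec requires to be
 * sorted expose binary search through this type.  Assuming a member
 * `glyphArray' declared as SortedArrayOf<GlyphID>:
 *
 *   int i = glyphArray.bsearch (glyph_id);
 *   if (i != -1) ... found at index i ...
 */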

} /* namespace OT */


#endif /* HB_OPEN_TYPE_PRIVATE_HH */