/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012,2018  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_MACHINERY_HH
#define HB_MACHINERY_HH

#include "hb.hh"
#include "hb-blob.hh"

#include "hb-array.hh"
#include "hb-vector.hh"


/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
static inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
static inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
static inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also.  X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
static inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }

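/* Illustrative sketch (hypothetical types, not defined in this file): given a
 * variable-sized record that implements get_size(), StructAfter<>() walks to
 * whatever is laid out right behind it in memory:
 *
 *   const MyRecord &rec = ...;
 *   const MyNext &next = StructAfter<MyNext> (rec);
 *   // equivalent to: StructAtOffset<MyNext> (&rec, rec.get_size ())
 */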

/*
 * Size checking
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  void _instance_assertion_on_line_##_line () const \
  { static_assert ((_assertion), ""); }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  void _compiles_assertion_on_line_##_line () const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)) \
  unsigned int get_size () const { return (size); } \
  enum { null_size = (size) }; \
  enum { min_size = (size) }; \
  enum { static_size = (size) }

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_COMPILES_ASSERTION ((void) this->u._member.static_size) \
  DEFINE_INSTANCE_ASSERTION (sizeof(this->u._member) == (size)) \
  enum { null_size = (size) }; \
  enum { min_size = (size) }

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)) \
  enum { null_size = (size) }; \
  enum { min_size = (size) }

#define DEFINE_SIZE_UNBOUNDED(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)) \
  enum { min_size = (size) }

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_COMPILES_ASSERTION ((void) (array)[0].static_size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + VAR * sizeof ((array)[0])) \
  enum { null_size = (size) }; \
  enum { min_size = (size) }

#define DEFINE_SIZE_ARRAY_SIZED(size, array) \
  unsigned int get_size () const { return (size - (array).min_size + (array).get_size ()); } \
  DEFINE_SIZE_ARRAY(size, array)

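/* Illustrative sketch (hypothetical structs; HBUINT16 and VAR are assumed to
 * come from the open-type headers that include this file) of how these macros
 * are typically used at the end of a struct body:
 *
 *   struct FixedPair
 *   {
 *     HBUINT16 first;
 *     HBUINT16 second;
 *     DEFINE_SIZE_STATIC (4);          // asserts sizeof (*this) == 4
 *   };
 *
 *   struct CountedArray
 *   {
 *     HBUINT16 len;
 *     HBUINT16 values[VAR];            // variable-length tail
 *     DEFINE_SIZE_ARRAY (2, values);   // 2-byte header + open-ended array
 *   };
 */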

/*
 * Dispatch
 */

template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  enum { max_debug_depth = MaxDebugDepth };
  typedef Return return_t;
  template <typename T, typename F>
  bool may_dispatch (const T *obj HB_UNUSED, const F *format HB_UNUSED) { return true; }
  static return_t no_dispatch_return_value () { return Context::default_return_value (); }
  static bool stop_sublookup_iteration (const return_t r HB_UNUSED) { return false; }
};


/*
 * Sanitize
 *
 *
 * === Introduction ===
 *
 * The sanitize machinery is at the core of our zero-cost font loading.  We
 * mmap() the font file into memory and create a blob out of it.  Font
 * subtables are returned as read-only sub-blobs of the main font blob.  These
 * table blobs are then sanitized before use, to ensure invalid memory access
 * does not happen.  The toplevel sanitize API is used like this, e.g. to load
 * the 'head' table:
 *
 *   hb_blob_t *head_blob = hb_sanitize_context_t ().reference_table<OT::head> (face);
 *
 * The blob can then be converted to a head table struct with:
 *
 *   const head *head_table = head_blob->as<head> ();
 *
 * What reference_table() does is call hb_face_reference_table() to load the
 * table blob, sanitize it, and return either the sanitized blob or the empty
 * blob if sanitization failed.  The blob->as() function returns the null
 * object of its template type argument if the blob is empty.  Otherwise, it
 * just casts the blob contents to the desired type.
 *
 * Sanitizing a blob of data with a type T works as follows (with minor
 * simplification):
 *
 *   - Cast the blob content to T* and call its sanitize() method,
 *   - If sanitize() succeeded, return the blob.
 *   - Otherwise, if the blob is not writable, try making it writable,
 *     or copy it if it cannot be made writable in place,
 *   - Call sanitize() again.  Return the blob if sanitize() succeeded.
 *   - Return the empty blob otherwise.
 *
 *
 * === The sanitize() contract ===
 *
 * The sanitize() method of each object type shall return true if it's safe to
 * call other methods of the object, and false otherwise.
 *
 * Note that what sanitize() checks for might align with what the specification
 * describes as valid table data, but it does not have to.  In particular, we
 * do NOT want to be pedantic and concern ourselves with validity checks that
 * are irrelevant to our use of the table.  On the contrary, we want to be
 * lenient with error handling and accept invalid data to the extent that it
 * does not impose an extra burden on us.
 *
 * Based on the sanitize contract, one can see that what we check for depends
 * on how we use the data in other table methods.  I.e. if other table methods
 * assume that offsets do NOT point out of the table data block, then that's
 * something sanitize() must check for (GSUB/GPOS/GDEF/etc work this way).  On
 * the other hand, if other methods do such checks themselves, then sanitize()
 * does not have to bother with them (glyf/loca work this way).  The choice
 * depends on the table structure and sanitize() performance.  For example,
 * checking glyf/loca offsets in sanitize() would cost O(num-glyphs).  We try
 * hard to avoid such costs during font loading.  By postponing such checks to
 * the actual glyph loading, we reduce the sanitize cost to O(1) and the total
 * runtime cost to O(used-glyphs).  As such, this is preferred.
 *
 * The same argument can be made re GSUB/GPOS/GDEF, but there, the table
 * structure is so complicated that by checking all offsets at sanitize() time,
 * we make the code much simpler in other methods, as offsets and referenced
 * objects do not need to be validated at each use site.
 */

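/* Illustrative sketch of the sanitize() contract in practice (hypothetical
 * struct; real tables live in the hb-ot-*.hh headers, and TRACE_SANITIZE /
 * return_trace / HBUINT16 are assumed to come from the headers that include
 * this file):
 *
 *   struct MyTable
 *   {
 *     bool sanitize (hb_sanitize_context_t *c) const
 *     {
 *       TRACE_SANITIZE (this);
 *       return_trace (c->check_struct (this) &&        // fixed-size header is in range
 *                     c->check_array (values, count)); // variable-length tail is in range
 *     }
 *
 *     HBUINT16 count;
 *     HBUINT16 values[VAR];
 *     DEFINE_SIZE_ARRAY (2, values);
 *   };
 */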
/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif
#ifndef HB_SANITIZE_MAX_OPS_FACTOR
#define HB_SANITIZE_MAX_OPS_FACTOR 8
#endif
#ifndef HB_SANITIZE_MAX_OPS_MIN
#define HB_SANITIZE_MAX_OPS_MIN 16384
#endif
#ifndef HB_SANITIZE_MAX_OPS_MAX
#define HB_SANITIZE_MAX_OPS_MAX 0x3FFFFFFF
#endif

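/* For reference, start_processing() below computes
 *   max_ops = MAX (table_size * HB_SANITIZE_MAX_OPS_FACTOR, HB_SANITIZE_MAX_OPS_MIN)
 * so, for example, a 54-byte 'head' table still gets the 16384-op floor,
 * while a 1 MB table gets roughly 8.4 million ops before sanitizing gives up.
 */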
struct hb_sanitize_context_t :
       hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
{
  hb_sanitize_context_t () :
        debug_depth (0),
        start (nullptr), end (nullptr),
        max_ops (0),
        writable (false), edit_count (0),
        blob (nullptr),
        num_glyphs (65536),
        num_glyphs_set (false) {}

  const char *get_name () { return "SANITIZE"; }
  template <typename T, typename F>
  bool may_dispatch (const T *obj HB_UNUSED, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value () { return true; }
  static return_t no_dispatch_return_value () { return false; }
  bool stop_sublookup_iteration (const return_t r) const { return !r; }

  void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  void set_num_glyphs (unsigned int num_glyphs_)
  {
    num_glyphs = num_glyphs_;
    num_glyphs_set = true;
  }
  unsigned int get_num_glyphs () { return num_glyphs; }

  void set_max_ops (int max_ops_) { max_ops = max_ops_; }

  template <typename T>
  void set_object (const T *obj)
  {
    reset_object ();

    if (!obj) return;

    const char *obj_start = (const char *) obj;
    const char *obj_end = (const char *) obj + obj->get_size ();
    assert (obj_start <= obj_end); /* Must not overflow. */

    if (unlikely (obj_end < this->start || this->end < obj_start))
      this->start = this->end = nullptr;
    else
    {
      this->start = MAX (this->start, obj_start);
      this->end   = MIN (this->end  , obj_end  );
    }
  }

  void reset_object ()
  {
    this->start = this->blob->data;
    this->end = this->start + this->blob->length;
    assert (this->start <= this->end); /* Must not overflow. */
  }

  void start_processing ()
  {
    reset_object ();
    this->max_ops = MAX ((unsigned int) (this->end - this->start) * HB_SANITIZE_MAX_OPS_FACTOR,
                         (unsigned) HB_SANITIZE_MAX_OPS_MIN);
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));
  }

  void end_processing ()
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
                     "end [%p..%p] %u edit requests",
                     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = nullptr;
    this->start = this->end = nullptr;
  }

  bool check_range (const void *base,
                    unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p &&
              p <= this->end &&
              (unsigned int) (this->end - p) >= len &&
              this->max_ops-- > 0;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  template <typename T>
  bool check_range (const T *base,
                    unsigned int a,
                    unsigned int b) const
  {
    return !hb_unsigned_mul_overflows (a, b) &&
           this->check_range (base, a * b);
  }

  template <typename T>
  bool check_range (const T *base,
                    unsigned int a,
                    unsigned int b,
                    unsigned int c) const
  {
    return !hb_unsigned_mul_overflows (a, b) &&
           this->check_range (base, a * b, c);
  }

  template <typename T>
  bool check_array (const T *base, unsigned int len) const
  {
    return this->check_range (base, len, hb_static_size (T));
  }

  template <typename T>
  bool check_array (const T *base,
                    unsigned int a,
                    unsigned int b) const
  {
    return this->check_range (base, a, b, hb_static_size (T));
  }

  template <typename Type>
  bool check_struct (const Type *obj) const
  { return likely (this->check_range (obj, obj->min_size)); }

  bool may_edit (const void *base, unsigned int len)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  template <typename Type, typename ValueType>
  bool try_set (const Type *obj, const ValueType &v)
  {
    if (this->may_edit (obj, hb_static_size (Type)))
    {
      hb_assign (* const_cast<Type *> (obj), v);
      return true;
    }
    return false;
  }

  template <typename Type>
  hb_blob_t *sanitize_blob (hb_blob_t *blob)
  {
    bool sane;

    init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, start, "start");

    start_processing ();

    if (unlikely (!start))
    {
      end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (start));

    sane = t->sanitize (this);
    if (sane)
    {
      if (edit_count)
      {
        DEBUG_MSG_FUNC (SANITIZE, start, "passed first round with %d edits; going for second round", edit_count);

        /* sanitize again to ensure no toe-stepping */
        edit_count = 0;
        sane = t->sanitize (this);
        if (edit_count) {
          DEBUG_MSG_FUNC (SANITIZE, start, "requested %d edits in second round; FAILING", edit_count);
          sane = false;
        }
      }
    }
    else
    {
      if (edit_count && !writable) {
        start = hb_blob_get_data_writable (blob, nullptr);
        end = start + blob->length;

        if (start)
        {
          writable = true;
          /* ok, we made it writable by relocating.  try again */
          DEBUG_MSG_FUNC (SANITIZE, start, "retry");
          goto retry;
        }
      }
    }

    end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, start, sane ? "PASSED" : "FAILED");
    if (sane)
    {
      hb_blob_make_immutable (blob);
      return blob;
    }
    else
    {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  template <typename Type>
  hb_blob_t *reference_table (const hb_face_t *face, hb_tag_t tableTag = Type::tableTag)
  {
    if (!num_glyphs_set)
      set_num_glyphs (hb_face_get_glyph_count (face));
    return sanitize_blob<Type> (hb_face_reference_table (face, tableTag));
  }

  mutable unsigned int debug_depth;
  const char *start, *end;
  mutable int max_ops;
  private:
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
  unsigned int num_glyphs;
  bool  num_glyphs_set;
};

struct hb_sanitize_with_object_t
{
  template <typename T>
  hb_sanitize_with_object_t (hb_sanitize_context_t *c,
                             const T& obj) : c (c)
  { c->set_object (obj); }
  ~hb_sanitize_with_object_t ()
  { c->reset_object (); }

  private:
  hb_sanitize_context_t *c;
};

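/* Illustrative sketch (hypothetical call site): hb_sanitize_with_object_t is
 * an RAII helper that temporarily narrows the sanitizer's [start, end) range
 * to a single sub-object, e.g. while recursing into one glyph:
 *
 *   bool sanitize_glyph (hb_sanitize_context_t *c, const Glyph *glyph)
 *   {
 *     hb_sanitize_with_object_t with (c, glyph);  // range narrowed to *glyph
 *     return c->check_struct (glyph);
 *     // range reset to the full blob when 'with' goes out of scope
 *   }
 *
 * 'Glyph' and 'sanitize_glyph' are made-up names for the example.
 */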

/*
 * Serialize
 */

struct hb_serialize_context_t
{
  hb_serialize_context_t (void *start_, unsigned int size)
  {
    this->start = (char *) start_;
    this->end = this->start + size;
    reset ();
  }

  bool in_error () const { return !this->successful; }

  void reset ()
  {
    this->successful = true;
    this->head = this->start;
    this->debug_depth = 0;
  }

  bool propagate_error (bool e)
  { return this->successful = this->successful && e; }
  template <typename T> bool propagate_error (const T &obj)
  { return this->successful = this->successful && !obj.in_error (); }
  template <typename T> bool propagate_error (const T *obj)
  { return this->successful = this->successful && !obj->in_error (); }
  template <typename T1, typename T2> bool propagate_error (T1 &o1, T2 &o2)
  { return propagate_error (o1) && propagate_error (o2); }
  template <typename T1, typename T2> bool propagate_error (T1 *o1, T2 *o2)
  { return propagate_error (o1) && propagate_error (o2); }
  template <typename T1, typename T2, typename T3>
  bool propagate_error (T1 &o1, T2 &o2, T3 &o3)
  { return propagate_error (o1) && propagate_error (o2, o3); }
  template <typename T1, typename T2, typename T3>
  bool propagate_error (T1 *o1, T2 *o2, T3 *o3)
  { return propagate_error (o1) && propagate_error (o2, o3); }

  /* To be called around main operation. */
  template <typename Type>
  Type *start_serialize ()
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));

    return start_embed<Type> ();
  }
  void end_serialize ()
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
                     "end [%p..%p] serialized %d bytes; %s",
                     this->start, this->end,
                     (int) (this->head - this->start),
                     this->successful ? "successful" : "UNSUCCESSFUL");
  }

  unsigned int length () const { return this->head - this->start; }

  void align (unsigned int alignment)
  {
    unsigned int l = length () % alignment;
    if (l)
      allocate_size<void> (alignment - l);
  }

  template <typename Type>
  Type *start_embed (const Type *_ HB_UNUSED = nullptr) const
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }

  template <typename Type>
  Type *allocate_size (unsigned int size)
  {
    if (unlikely (!this->successful || this->end - this->head < ptrdiff_t (size))) {
      this->successful = false;
      return nullptr;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  Type *allocate_min ()
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  template <typename Type>
  Type *embed (const Type &obj)
  {
    unsigned int size = obj.get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return nullptr;
    memcpy (ret, &obj, size);
    return ret;
  }
  template <typename Type>
  hb_serialize_context_t &operator << (const Type &obj) { embed (obj); return *this; }

  template <typename Type>
  Type *extend_size (Type &obj, unsigned int size)
  {
    assert (this->start <= (char *) &obj);
    assert ((char *) &obj <= this->head);
    assert ((char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
    return reinterpret_cast<Type *> (&obj);
  }

  template <typename Type>
  Type *extend_min (Type &obj) { return extend_size (obj, obj.min_size); }

  template <typename Type>
  Type *extend (Type &obj) { return extend_size (obj, obj.get_size ()); }

  /* Output routines. */
  template <typename Type>
  Type *copy () const
  {
    assert (this->successful);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }
  hb_bytes_t copy_bytes () const
  {
    assert (this->successful);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    else
      return hb_bytes_t ();
    return hb_bytes_t ((char *) p, len);
  }
  hb_blob_t *copy_blob () const
  {
    assert (this->successful);
    return hb_blob_create (this->start,
                           this->head - this->start,
                           HB_MEMORY_MODE_DUPLICATE,
                           nullptr, nullptr);
  }

  public:
  unsigned int debug_depth;
  char *start, *end, *head;
  bool successful;
};

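/* Illustrative sketch of the serialize flow (hypothetical buffer size and
 * table type; MyTable and its contents are made up):
 *
 *   char buf[1024];
 *   hb_serialize_context_t c (buf, sizeof (buf));
 *
 *   MyTable *t = c.start_serialize<MyTable> ();  // casts head; no allocation yet
 *   if (c.extend_min (*t))                       // reserve MyTable::min_size bytes
 *   {
 *     // ... fill in *t; append more data with allocate_size<>() / embed () ...
 *   }
 *   c.end_serialize ();
 *
 *   hb_blob_t *out = c.in_error () ? hb_blob_get_empty () : c.copy_blob ();
 */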


/*
 * Big-endian integers.
 */

template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 1>
{
  public:
  typedef Type type;
  void set (Type V)      { v = V; }
  operator Type () const { return v; }
  private: uint8_t v;
};
template <typename Type>
struct BEInt<Type, 2>
{
  public:
  typedef Type type;
  void set (Type V)
  {
    v[0] = (V >>  8) & 0xFF;
    v[1] = (V      ) & 0xFF;
  }
  operator Type () const
  {
#if ((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__)) && \
    defined(__BYTE_ORDER) && \
    (__BYTE_ORDER == __LITTLE_ENDIAN || __BYTE_ORDER == __BIG_ENDIAN)
    /* Spoon-feed the compiler a big-endian integer with alignment 1.
     * https://github.com/harfbuzz/harfbuzz/pull/1398 */
    struct __attribute__((packed)) packed_uint16_t { uint16_t v; };
#if __BYTE_ORDER == __LITTLE_ENDIAN
    return __builtin_bswap16 (((packed_uint16_t *) this)->v);
#else /* __BYTE_ORDER == __BIG_ENDIAN */
    return ((packed_uint16_t *) this)->v;
#endif
#endif
    return (v[0] <<  8)
         + (v[1]      );
  }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  typedef Type type;
  void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  operator Type () const
  {
    return (v[0] << 16)
         + (v[1] <<  8)
         + (v[2]      );
  }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  typedef Type type;
  void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  operator Type () const
  {
    return (v[0] << 24)
         + (v[1] << 16)
         + (v[2] <<  8)
         + (v[3]      );
  }
  private: uint8_t v[4];
};

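/* Illustrative sketch: BEInt stores its value as big-endian bytes regardless
 * of host endianness, so a round trip through set() / operator Type()
 * preserves the value while the in-memory layout stays portable:
 *
 *   BEInt<uint16_t, 2> be;
 *   be.set (0x1234);        // stored as bytes { 0x12, 0x34 }
 *   uint16_t x = be;        // reads back 0x1234 on any host
 */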

/*
 * Lazy loaders.
 */

template <typename Data, unsigned int WheresData>
struct hb_data_wrapper_t
{
  static_assert (WheresData > 0, "");

  Data * get_data () const
  { return *(((Data **) (void *) this) - WheresData); }

  bool is_inert () const { return !get_data (); }

  template <typename Stored, typename Subclass>
  Stored * call_create () const { return Subclass::create (get_data ()); }
};
template <>
struct hb_data_wrapper_t<void, 0>
{
  bool is_inert () const { return false; }

  template <typename Stored, typename Funcs>
  Stored * call_create () const { return Funcs::create (); }
};

template <typename T1, typename T2> struct hb_non_void_t { typedef T1 value; };
template <typename T2> struct hb_non_void_t<void, T2> { typedef T2 value; };

template <typename Returned,
          typename Subclass = void,
          typename Data = void,
          unsigned int WheresData = 0,
          typename Stored = Returned>
struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
{
  typedef typename hb_non_void_t<Subclass,
                                 hb_lazy_loader_t<Returned,Subclass,Data,WheresData,Stored>
                                >::value Funcs;

  void init0 () {} /* Init, when memory is already set to 0. No-op for us. */
  void init ()  { instance.set_relaxed (nullptr); }
  void fini ()  { do_destroy (instance.get ()); }

  void free_instance ()
  {
  retry:
    Stored *p = instance.get ();
    if (unlikely (p && !cmpexch (p, nullptr)))
      goto retry;
    do_destroy (p);
  }

  static void do_destroy (Stored *p)
  {
    if (p && p != const_cast<Stored *> (Funcs::get_null ()))
      Funcs::destroy (p);
  }

  const Returned * operator -> () const { return get (); }
  const Returned & operator * () const  { return *get (); }
  explicit_operator bool () const
  { return get_stored () != Funcs::get_null (); }
  template <typename C> operator const C * () const { return get (); }

  Stored * get_stored () const
  {
  retry:
    Stored *p = this->instance.get ();
    if (unlikely (!p))
    {
      if (unlikely (this->is_inert ()))
        return const_cast<Stored *> (Funcs::get_null ());

      p = this->template call_create<Stored, Funcs> ();
      if (unlikely (!p))
        p = const_cast<Stored *> (Funcs::get_null ());

      if (unlikely (!cmpexch (nullptr, p)))
      {
        do_destroy (p);
        goto retry;
      }
    }
    return p;
  }
  Stored * get_stored_relaxed () const
  {
    return this->instance.get_relaxed ();
  }

  bool cmpexch (Stored *current, Stored *value) const
  {
    /* This *must* be called when there are no other threads accessing. */
    return this->instance.cmpexch (current, value);
  }

  const Returned * get () const { return Funcs::convert (get_stored ()); }
  const Returned * get_relaxed () const { return Funcs::convert (get_stored_relaxed ()); }
  Returned * get_unconst () const { return const_cast<Returned *> (Funcs::convert (get_stored ())); }

  /* To be possibly overloaded by subclasses. */
  static Returned* convert (Stored *p) { return p; }

  /* By default null/init/fini the object. */
  static const Stored* get_null () { return &Null(Stored); }
  static Stored *create (Data *data)
  {
    Stored *p = (Stored *) calloc (1, sizeof (Stored));
    if (likely (p))
      p->init (data);
    return p;
  }
  static Stored *create ()
  {
    Stored *p = (Stored *) calloc (1, sizeof (Stored));
    if (likely (p))
      p->init ();
    return p;
  }
  static void destroy (Stored *p)
  {
    p->fini ();
    free (p);
  }

//  private:
  /* Must only have one pointer. */
  hb_atomic_ptr_t<Stored *> instance;
};

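/* Illustrative sketch of the WheresData convention (hypothetical layout): the
 * loader locates its owning object by walking back WheresData pointer-sized
 * slots from its own address, so it must be placed exactly that many Data*
 * slots after a pointer back to the owner.  E.g. in a face-owned cache:
 *
 *   struct my_face_data_t
 *   {
 *     hb_face_t *face;                           // one slot before the loader
 *     hb_table_lazy_loader_t<OT::head, 1> head;  // WheresData == 1
 *   };
 *
 * head.get_data () then yields the hb_face_t*, and the table blob is loaded
 * and sanitized on first access via reference_table<OT::head>.
 * 'my_face_data_t' is a made-up name; the real layouts live in hb-face.hh and
 * hb-ot-face.hh.
 */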
/* Specializations. */

template <typename T, unsigned int WheresFace>
struct hb_face_lazy_loader_t : hb_lazy_loader_t<T,
                                                hb_face_lazy_loader_t<T, WheresFace>,
                                                hb_face_t, WheresFace> {};

template <typename T, unsigned int WheresFace>
struct hb_table_lazy_loader_t : hb_lazy_loader_t<T,
                                                 hb_table_lazy_loader_t<T, WheresFace>,
                                                 hb_face_t, WheresFace,
                                                 hb_blob_t>
{
  static hb_blob_t *create (hb_face_t *face)
  { return hb_sanitize_context_t ().reference_table<T> (face); }
  static void destroy (hb_blob_t *p) { hb_blob_destroy (p); }

  static const hb_blob_t *get_null ()
  { return hb_blob_get_empty (); }

  static const T* convert (const hb_blob_t *blob)
  { return blob->as<T> (); }

  hb_blob_t* get_blob () const { return this->get_stored (); }
};

template <typename Subclass>
struct hb_font_funcs_lazy_loader_t : hb_lazy_loader_t<hb_font_funcs_t, Subclass>
{
  static void destroy (hb_font_funcs_t *p)
  { hb_font_funcs_destroy (p); }
  static const hb_font_funcs_t *get_null ()
  { return hb_font_funcs_get_empty (); }
};
template <typename Subclass>
struct hb_unicode_funcs_lazy_loader_t : hb_lazy_loader_t<hb_unicode_funcs_t, Subclass>
{
  static void destroy (hb_unicode_funcs_t *p)
  { hb_unicode_funcs_destroy (p); }
  static const hb_unicode_funcs_t *get_null ()
  { return hb_unicode_funcs_get_empty (); }
};


#endif /* HB_MACHINERY_HH */