Home | History | Annotate | Download | only in cipher
      1 /* ====================================================================
      2  * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
      3  *
      4  * Redistribution and use in source and binary forms, with or without
      5  * modification, are permitted provided that the following conditions
      6  * are met:
      7  *
      8  * 1. Redistributions of source code must retain the above copyright
      9  *    notice, this list of conditions and the following disclaimer.
     10  *
     11  * 2. Redistributions in binary form must reproduce the above copyright
     12  *    notice, this list of conditions and the following disclaimer in
     13  *    the documentation and/or other materials provided with the
     14  *    distribution.
     15  *
     16  * 3. All advertising materials mentioning features or use of this
     17  *    software must display the following acknowledgment:
     18  *    "This product includes software developed by the OpenSSL Project
     19  *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
     20  *
     21  * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
     22  *    endorse or promote products derived from this software without
     23  *    prior written permission. For written permission, please contact
     24  *    openssl-core (at) openssl.org.
     25  *
     26  * 5. Products derived from this software may not be called "OpenSSL"
     27  *    nor may "OpenSSL" appear in their names without prior written
     28  *    permission of the OpenSSL Project.
     29  *
     30  * 6. Redistributions of any form whatsoever must retain the following
     31  *    acknowledgment:
     32  *    "This product includes software developed by the OpenSSL Project
     33  *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
     34  *
     35  * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
     36  * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
     37  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     38  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
     39  * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     40  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
     41  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
     42  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
     43  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
     44  * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     45  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
     46  * OF THE POSSIBILITY OF SUCH DAMAGE.
     47  * ==================================================================== */
     48 
     49 #include <openssl/aead.h>
     50 #include <openssl/aes.h>
     51 #include <openssl/cipher.h>
     52 #include <openssl/cpu.h>
     53 #include <openssl/err.h>
     54 #include <openssl/mem.h>
     55 #include <openssl/modes.h>
     56 #include <openssl/obj.h>
     57 #include <openssl/rand.h>
     58 
     59 #include "internal.h"
     60 #include "../modes/internal.h"
     61 
     62 
/* EVP_AES_KEY holds the per-context state for the non-AEAD AES EVP ciphers
 * (ECB, CBC and CTR). */
typedef struct {
  union {
    double align; /* forces alignment suitable for the asm key schedules */
    AES_KEY ks;
  } ks;             /* AES key schedule */
  block128_f block; /* single-block encrypt/decrypt function */
  union {
    cbc128_f cbc; /* accelerated CBC routine, or NULL */
    ctr128_f ctr; /* accelerated CTR routine, or NULL */
  } stream;
} EVP_AES_KEY;
     74 
/* EVP_AES_GCM_CTX holds the per-context state for the AES-GCM EVP ciphers. */
typedef struct {
  union {
    double align; /* forces alignment suitable for the asm key schedules */
    AES_KEY ks;
  } ks;        /* AES key schedule to use */
  int key_set; /* Set if key initialised */
  int iv_set;  /* Set if an iv is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv; /* Temporary IV store; aliases the EVP_CIPHER_CTX's IV buffer
                * unless a longer IV forced a heap allocation (see
                * EVP_CTRL_GCM_SET_IVLEN in aes_gcm_ctrl). */
  int ivlen;         /* IV length */
  int taglen;        /* expected/produced tag length, or -1 if unset */
  int iv_gen;      /* It is OK to generate IVs */
  ctr128_f ctr;    /* accelerated CTR routine, or NULL */
} EVP_AES_GCM_CTX;
     89 
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
extern unsigned int OPENSSL_ia32cap_P[];

/* vpaes_capable returns non-zero if the vector-permutation AES implementation
 * may be used. Bit 41 of the cpuid feature vector is ECX bit 9 (SSSE3). */
static char vpaes_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
/* On x86-64, the bit-sliced implementation has the same (SSSE3)
 * requirement as VPAES. */
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM)
#include "../arm_arch.h"
#if __ARM_ARCH__ >= 7
#define BSAES
/* On ARMv7 and above, bit-sliced AES requires NEON. */
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif  /* __ARM_ARCH__ >= 7 */
#endif  /* OPENSSL_ARM */
    115 
#if defined(BSAES)
/* On platforms where BSAES gets defined (just above), then these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
/* Without BSAES asm, report the implementation as unavailable so the
 * dispatchers below never select it. */
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. They abort() rather than silently
 * mis-encrypt if that invariant is ever broken. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif
    140 
#if defined(VPAES)
/* On platforms where VPAES gets defined (just above), then these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
/* Without VPAES asm, report the implementation as unavailable so the
 * dispatchers below never select it. */
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. They abort() rather than silently
 * mis-encrypt if that invariant is ever broken. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif
    176 
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
/* AES-NI functions, provided by asm on x86 and x86-64. */
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec);

#if defined(OPENSSL_X86_64)
/* Stitched AES-GCM assembly: encrypts/decrypts and updates the GHASH state
 * in one pass. Only usable under the AES_GCM_ASM(gctx) condition below. */
size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
                   size_t len);
#define AES_GCM_ASM(gctx) \
  (gctx->ctr == aesni_ctr32_encrypt_blocks && gctx->gcm.ghash == gcm_ghash_avx)
#endif  /* OPENSSL_X86_64 */

#else

/* On other platforms, aesni_capable() will always return false and so the
 * following will never be called. They abort() rather than silently
 * mis-encrypt if that invariant is ever broken. */
void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec) {
  abort();
}

#endif
    222 
/* aes_init_key installs an AES key schedule in |ctx| and selects the best
 * available block and stream implementations for the cipher's mode.
 * Returns 1 on success and 0 on failure. |iv| is unused here; the EVP layer
 * handles IV storage for these modes. */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  /* ECB/CBC decryption needs a decryption key schedule; every other case
   * (including CTR, which only ever encrypts the counter) uses an
   * encryption schedule. */
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      /* bsaes provides the CBC routine but relies on the generic key setup
       * and single-block function. */
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aes_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
    268 
    269 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
    270                           const unsigned char *in, size_t len) {
    271   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
    272 
    273   if (dat->stream.cbc) {
    274     (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
    275   } else if (ctx->encrypt) {
    276     CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    277   } else {
    278     CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    279   }
    280 
    281   return 1;
    282 }
    283 
    284 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
    285                           const unsigned char *in, size_t len) {
    286   size_t bl = ctx->cipher->block_size;
    287   size_t i;
    288   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
    289 
    290   if (len < bl) {
    291     return 1;
    292   }
    293 
    294   for (i = 0, len -= bl; i <= len; i += bl) {
    295     (*dat->block)(in + i, out + i, &dat->ks);
    296   }
    297 
    298   return 1;
    299 }
    300 
    301 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
    302                           const unsigned char *in, size_t len) {
    303   unsigned int num = ctx->num;
    304   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
    305 
    306   if (dat->stream.ctr) {
    307     CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
    308                                 dat->stream.ctr);
    309   } else {
    310     CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
    311                           dat->block);
    312   }
    313   ctx->num = (size_t)num;
    314   return 1;
    315 }
    316 
    317 static ctr128_f aes_gcm_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
    318                                 const uint8_t *key, size_t key_len) {
    319   if (bsaes_capable()) {
    320     AES_set_encrypt_key(key, key_len * 8, aes_key);
    321     CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    322     return (ctr128_f)bsaes_ctr32_encrypt_blocks;
    323   }
    324 
    325   if (vpaes_capable()) {
    326     vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    327     CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    328     return NULL;
    329   }
    330 
    331   AES_set_encrypt_key(key, key_len * 8, aes_key);
    332   CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    333   return NULL;
    334 }
    335 
/* aes_gcm_init_key configures the GCM context with a new |key| and/or |iv|.
 * Either may be NULL, in which case the corresponding state is kept.
 * Always returns 1. */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    gctx->ctr = aes_gcm_set_key(&gctx->ks.ks, &gctx->gcm, key, ctx->key_len);
    /* If an IV was supplied, set it directly; otherwise fall back to a
     * previously saved IV, if any. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* IV only: apply it now if the key is already set, otherwise save it
     * until the key arrives. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
    365 
    366 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
    367   EVP_AES_GCM_CTX *gctx = c->cipher_data;
    368   OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    369   if (gctx->iv != c->iv) {
    370     OPENSSL_free(gctx->iv);
    371   }
    372   return 1;
    373 }
    374 
    375 /* increment counter (64-bit int) by 1 */
    376 static void ctr64_inc(uint8_t *counter) {
    377   int n = 8;
    378   uint8_t c;
    379 
    380   do {
    381     --n;
    382     c = counter[n];
    383     ++c;
    384     counter[n] = c;
    385     if (c) {
    386       return;
    387     }
    388   } while (n);
    389 }
    390 
/* aes_gcm_ctrl implements the GCM-specific control operations for
 * EVP_CIPHER_CTX_ctrl. Returns 1 on success, 0 on failure and -1 for an
 * unrecognised |type|. */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      /* Start with the context's built-in IV storage; a larger buffer is
       * allocated on demand by EVP_CTRL_GCM_SET_IVLEN below. */
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      /* Setting an expected tag is only valid for decryption; the tag bytes
       * are staged in |c->buf| for aes_gcm_cipher's final call. */
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      /* The tag can only be read after encryption has been finalised;
       * aes_gcm_cipher sets |taglen| at that point. */
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      /* Fixed field must be at least 4 bytes and invocation field
       * at least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        memcpy(gctx->iv, ptr, arg);
      }
      /* When encrypting, initialise the invocation field randomly. */
      if (c->encrypt &&
          RAND_pseudo_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      /* Copy out the trailing |arg| bytes of the IV, defaulting to all of
       * it for out-of-range requests. */
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* Invocation field will be at least 8 bytes in size and
       * so no need to check wrap around or increment more than
       * last 8 bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      /* Decrypt-only: install the peer's invocation field into the IV. */
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
      /* Fix up internal pointers in the copied context so they refer to the
       * copy's own storage rather than the original's. */
      if (gctx->gcm.key) {
        if (gctx->gcm.key != &gctx->ks) {
          return 0;
        }
        gctx_out->gcm.key = &gctx_out->ks;
      }
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
    508 
/* aes_gcm_cipher is the EVP cipher callback for AES-GCM. With |in| non-NULL
 * it encrypts/decrypts |len| bytes (or, with |out| NULL, absorbs |in| as
 * AAD) and returns the number of bytes processed. With |in| NULL it
 * finalises the operation: on decryption it verifies the tag staged in
 * |ctx->buf|, on encryption it writes the tag there. Returns -1 on error. */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  /* If not set up, return error */
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      /* No output buffer means |in| is additional authenticated data. */
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 32 && AES_GCM_ASM(gctx)) {
          /* First |res| bytes bring the GCM state to a block boundary so
           * the stitched assembly can process whole blocks from there. */
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_encrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          /* The asm path bypasses CRYPTO_gcm128_*, so account for its
           * bytes in the GCM length counter by hand. */
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                        len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in + bulk, out + bulk,
                                  len - bulk)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 16 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_decrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                        len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in + bulk, out + bulk,
                                  len - bulk)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      /* NOTE(review): `!` binds tighter than `!=`, so this condition reads
       * `(!CRYPTO_gcm128_finish(...)) != 0`, i.e. simply "finish failed".
       * The trailing `!= 0` is redundant but harmless. */
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
  }
}
    600 
/* aes_128_cbc is the generic (non-AES-NI) AES-128-CBC EVP_CIPHER. */
static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};
    606 
/* aes_128_ctr is the generic (non-AES-NI) AES-128-CTR EVP_CIPHER. */
static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr,     1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};
    612 
/* aes_128_ecb is the generic (non-AES-NI) AES-128-ECB EVP_CIPHER. */
static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};
    618 
/* aes_128_gcm is the generic (non-AES-NI) AES-128-GCM EVP_CIPHER.
 * NOTE(review): unlike aesni_256_gcm this table does not set
 * EVP_CIPH_CUSTOM_COPY even though aes_gcm_ctrl handles EVP_CTRL_COPY —
 * verify whether copies of this cipher's contexts are fixed up correctly. */
static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
    627 
    628 
    629 static const EVP_CIPHER aes_256_cbc = {
    630     NID_aes_128_cbc,     16 /* block_size */, 32 /* key_size */,
    631     16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    632     NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    633     NULL /* cleanup */,  NULL /* ctrl */};
    634 
    635 static const EVP_CIPHER aes_256_ctr = {
    636     NID_aes_128_ctr,     1 /* block_size */,  32 /* key_size */,
    637     16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    638     NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    639     NULL /* cleanup */,  NULL /* ctrl */};
    640 
    641 static const EVP_CIPHER aes_256_ecb = {
    642     NID_aes_128_ecb,     16 /* block_size */, 32 /* key_size */,
    643     16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    644     NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    645     NULL /* cleanup */,  NULL /* ctrl */};
    646 
    647 static const EVP_CIPHER aes_256_gcm = {
    648     NID_aes_128_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    649     sizeof(EVP_AES_GCM_CTX),
    650     EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
    651         EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
    652         EVP_CIPH_FLAG_AEAD_CIPHER,
    653     NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    654     aes_gcm_ctrl};
    655 
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

/* AES-NI section. */

/* aesni_capable returns non-zero if the CPU supports AES-NI. Bit 57 of the
 * cpuid feature vector is ECX bit 25 (AESNI). */
static char aesni_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}
    664 
    665 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
    666                           const uint8_t *iv, int enc) {
    667   int ret, mode;
    668   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
    669 
    670   mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
    671   if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    672     ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    673     dat->block = (block128_f)aesni_decrypt;
    674     dat->stream.cbc =
    675         mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
    676   } else {
    677     ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    678     dat->block = (block128_f)aesni_encrypt;
    679     if (mode == EVP_CIPH_CBC_MODE) {
    680       dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    681     } else if (mode == EVP_CIPH_CTR_MODE) {
    682       dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    683     } else {
    684       dat->stream.cbc = NULL;
    685     }
    686   }
    687 
    688   if (ret < 0) {
    689     OPENSSL_PUT_ERROR(CIPHER, aesni_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    690     return 0;
    691   }
    692 
    693   return 1;
    694 }
    695 
    696 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
    697                             const uint8_t *in, size_t len) {
    698   aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
    699 
    700   return 1;
    701 }
    702 
    703 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
    704                             const uint8_t *in, size_t len) {
    705   size_t bl = ctx->cipher->block_size;
    706 
    707   if (len < bl) {
    708     return 1;
    709   }
    710 
    711   aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
    712 
    713   return 1;
    714 }
    715 
/* aesni_gcm_init_key configures the GCM context with a new |key| and/or
 * |iv| using the AES-NI implementation. Either may be NULL, in which case
 * the corresponding state is kept. Always returns 1. */
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If an IV was supplied, set it directly; otherwise fall back to a
     * previously saved IV, if any. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* IV only: apply it now if the key is already set, otherwise save it
     * until the key arrives. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
    748 
/* aesni_128_cbc is the AES-NI AES-128-CBC EVP_CIPHER. */
static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};
    754 
/* aesni_128_ctr is the AES-NI AES-128-CTR EVP_CIPHER. It reuses the generic
 * aes_ctr_cipher; aesni_init_key installs the AES-NI CTR stream routine. */
static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr,     1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};
    760 
/* aesni_128_ecb is the AES-NI AES-128-ECB EVP_CIPHER. */
static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};
    766 
/* aesni_128_gcm is the AES-NI AES-128-GCM EVP_CIPHER.
 * NOTE(review): unlike aesni_256_gcm this table does not set
 * EVP_CIPH_CUSTOM_COPY even though aes_gcm_ctrl handles EVP_CTRL_COPY —
 * verify whether copies of this cipher's contexts are fixed up correctly. */
static const EVP_CIPHER aesni_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
    775 
    776 
    777 static const EVP_CIPHER aesni_256_cbc = {
    778     NID_aes_128_cbc,     16 /* block_size */, 32 /* key_size */,
    779     16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    780     NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    781     NULL /* cleanup */,  NULL /* ctrl */};
    782 
    783 static const EVP_CIPHER aesni_256_ctr = {
    784     NID_aes_128_ctr,     1 /* block_size */,  32 /* key_size */,
    785     16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    786     NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    787     NULL /* cleanup */,  NULL /* ctrl */};
    788 
    789 static const EVP_CIPHER aesni_256_ecb = {
    790     NID_aes_128_ecb,     16 /* block_size */, 32 /* key_size */,
    791     16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    792     NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    793     NULL /* cleanup */,  NULL /* ctrl */};
    794 
/* aesni_256_gcm: AES-NI AES-256 in GCM mode. GCM is an AEAD, so it opts into
 * the custom IV, custom cipher, and ctrl-based configuration paths rather
 * than the generic EVP block handling. */
static const EVP_CIPHER aesni_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
    803 
/* EVP_CIPHER_FUNCTION defines the public accessor
 * EVP_aes_<keybits>_<mode>(), which selects the AES-NI cipher table at
 * runtime when the CPU supports it and otherwise falls back to the generic
 * aes_<keybits>_<mode> table. */
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }
    812 
#else  /* ^^^  OPENSSL_X86_64 || OPENSSL_X86 */

/* On non-x86 builds AES-NI is never available. */
static char aesni_capable(void) {
  return 0;
}

/* Non-x86 fallback: EVP_aes_<keybits>_<mode>() always returns the generic
 * cipher table. */
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return &aes_##keybits##_##mode;                    \
  }

#endif
    825 
/* Instantiate the public EVP_aes_* accessors for every supported key size
 * and mode. */
EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, gcm)


/* Tag length, in bytes, used by the AES-GCM AEADs (the full 128-bit tag). */
#define EVP_AEAD_AES_GCM_TAG_LEN 16
    838 
/* aead_aes_gcm_ctx is the per-AEAD-context state for AES-GCM: the expanded
 * AES key, a pre-keyed GCM context that is copied per operation, and an
 * optional accelerated CTR function. */
struct aead_aes_gcm_ctx {
  union {
    double align; /* forces alignment of the embedded key schedule */
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr;    /* bulk CTR function; may be NULL (seal/open check it) */
  uint8_t tag_len; /* tag length in bytes, <= EVP_AEAD_AES_GCM_TAG_LEN */
};
    848 
    849 static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
    850                              size_t key_len, size_t tag_len) {
    851   struct aead_aes_gcm_ctx *gcm_ctx;
    852   const size_t key_bits = key_len * 8;
    853 
    854   if (key_bits != 128 && key_bits != 256) {
    855     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_BAD_KEY_LENGTH);
    856     return 0; /* EVP_AEAD_CTX_init should catch this. */
    857   }
    858 
    859   if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    860     tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
    861   }
    862 
    863   if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    864     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_TAG_TOO_LARGE);
    865     return 0;
    866   }
    867 
    868   gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
    869   if (gcm_ctx == NULL) {
    870     return 0;
    871   }
    872 
    873   if (aesni_capable()) {
    874     aesni_set_encrypt_key(key, key_len * 8, &gcm_ctx->ks.ks);
    875     CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
    876                        (block128_f)aesni_encrypt);
    877     gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    878   } else {
    879     gcm_ctx->ctr =
    880         aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, key, key_len);
    881   }
    882   gcm_ctx->tag_len = tag_len;
    883   ctx->aead_state = gcm_ctx;
    884 
    885   return 1;
    886 }
    887 
    888 static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
    889   struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
    890   OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
    891   OPENSSL_free(gcm_ctx);
    892 }
    893 
    894 static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
    895                              size_t *out_len, size_t max_out_len,
    896                              const uint8_t *nonce, size_t nonce_len,
    897                              const uint8_t *in, size_t in_len,
    898                              const uint8_t *ad, size_t ad_len) {
    899   size_t bulk = 0;
    900   const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
    901   GCM128_CONTEXT gcm;
    902 
    903   if (in_len + gcm_ctx->tag_len < in_len) {
    904     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_seal, CIPHER_R_TOO_LARGE);
    905     return 0;
    906   }
    907 
    908   if (max_out_len < in_len + gcm_ctx->tag_len) {
    909     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_seal, CIPHER_R_BUFFER_TOO_SMALL);
    910     return 0;
    911   }
    912 
    913   memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
    914   CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
    915 
    916   if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    917     return 0;
    918   }
    919 
    920   if (gcm_ctx->ctr) {
    921     if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk, in_len - bulk,
    922                                      gcm_ctx->ctr)) {
    923       return 0;
    924     }
    925   } else {
    926     if (!CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk, in_len - bulk)) {
    927       return 0;
    928     }
    929   }
    930 
    931   CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
    932   *out_len = in_len + gcm_ctx->tag_len;
    933   return 1;
    934 }
    935 
    936 static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
    937                              size_t *out_len, size_t max_out_len,
    938                              const uint8_t *nonce, size_t nonce_len,
    939                              const uint8_t *in, size_t in_len,
    940                              const uint8_t *ad, size_t ad_len) {
    941   size_t bulk = 0;
    942   const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
    943   uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
    944   size_t plaintext_len;
    945   GCM128_CONTEXT gcm;
    946 
    947   if (in_len < gcm_ctx->tag_len) {
    948     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
    949     return 0;
    950   }
    951 
    952   plaintext_len = in_len - gcm_ctx->tag_len;
    953 
    954   if (max_out_len < plaintext_len) {
    955     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BUFFER_TOO_SMALL);
    956     return 0;
    957   }
    958 
    959   memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
    960   CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
    961 
    962   if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    963     return 0;
    964   }
    965 
    966   if (gcm_ctx->ctr) {
    967     if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
    968                                      in_len - bulk - gcm_ctx->tag_len,
    969                                      gcm_ctx->ctr)) {
    970       return 0;
    971     }
    972   } else {
    973     if (!CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
    974                                in_len - bulk - gcm_ctx->tag_len)) {
    975       return 0;
    976     }
    977   }
    978 
    979   CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
    980   if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
    981     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
    982     return 0;
    983   }
    984 
    985   *out_len = plaintext_len;
    986   return 1;
    987 }
    988 
/* AEAD vtables for AES-128-GCM and AES-256-GCM. The overhead equals the
 * full 16-byte tag appended by seal. */
static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,        aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,        aead_aes_gcm_open,
};

static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,        aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,        aead_aes_gcm_open,
};

/* Public accessors for the static AES-GCM AEAD vtables. */
const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }

const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }
   1010 
   1011 
/* AES Key Wrap is specified in
 * http://csrc.nist.gov/groups/ST/toolkit/documents/kms/key-wrap.pdf
 * or https://tools.ietf.org/html/rfc3394 */

/* aead_aes_key_wrap_ctx stores a raw copy of the wrapping key; the AES key
 * schedule is derived from it on each seal/open call. */
struct aead_aes_key_wrap_ctx {
  uint8_t key[32];   /* raw key bytes; only the first key_bits/8 are used */
  unsigned key_bits; /* 128 or 256 */
};
   1020 
/* aead_aes_key_wrap_init configures |ctx| for AES Key Wrap with |key_len|
 * bytes of key (16 or 32). The tag length must be 8 (the key-wrap integrity
 * block); EVP_AEAD_DEFAULT_TAG_LENGTH selects it. Returns 1 on success, 0 on
 * error. */
static int aead_aes_key_wrap_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  struct aead_aes_key_wrap_ctx *kw_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_init, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = 8;
  }

  /* Key wrap has a fixed 8-byte integrity value; no other size is allowed. */
  if (tag_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_init,
                      CIPHER_R_UNSUPPORTED_TAG_SIZE);
    return 0;
  }

  kw_ctx = OPENSSL_malloc(sizeof(struct aead_aes_key_wrap_ctx));
  if (kw_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_init, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  /* Store the raw key; the schedule is expanded per operation. */
  memcpy(kw_ctx->key, key, key_len);
  kw_ctx->key_bits = key_bits;

  ctx->aead_state = kw_ctx;
  return 1;
}
   1053 
/* aead_aes_key_wrap_cleanup zeroises the stored key material and frees the
 * key-wrap state attached to |ctx|. */
static void aead_aes_key_wrap_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  OPENSSL_cleanse(kw_ctx, sizeof(struct aead_aes_key_wrap_ctx));
  OPENSSL_free(kw_ctx);
}
   1059 
/* kDefaultAESKeyWrapNonce is the default IV given in section 2.2.3.1 of
 * RFC 3394, used when the caller passes an empty nonce. */
static const uint8_t kDefaultAESKeyWrapNonce[8] = {0xa6, 0xa6, 0xa6, 0xa6,
                                                   0xa6, 0xa6, 0xa6, 0xa6};
   1063 
   1064 
/* aead_aes_key_wrap_seal wraps |in_len| bytes (a multiple of 8, at least 16)
 * from |in| into |out|, producing |in_len| + 8 bytes. The 8-byte nonce is
 * the key-wrap IV; an empty nonce selects the RFC 3394 default. No
 * associated data is supported. Returns 1 on success, 0 on error. */
static int aead_aes_key_wrap_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1. */
  unsigned i, j, n;
  /* A[0..7] is the 64-bit integrity register; A[8..15] is scratch space for
   * the next 64-bit block being processed. */
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-16 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal, CIPHER_R_TOO_LARGE);
    return 0;
  }

  n = in_len / 8;

  /* RFC 3394 requires at least two 64-bit plaintext blocks. */
  if (n < 2) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  if (in_len + 8 < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + 8) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_encrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  /* Shift the plaintext into R[1..n] (out + 8); memmove because |in| and
   * |out| may overlap. */
  memmove(out + 8, in, in_len);
  memcpy(A, nonce, 8);

  for (j = 0; j < 6; j++) {
    for (i = 1; i <= n; i++) {
      uint32_t t;

      memcpy(A + 8, out + 8 * i, 8);
      AES_encrypt(A, A, &ks.ks);
      t = n * j + i;
      /* XOR the big-endian step counter |t| into the integrity register. */
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(out + 8 * i, A + 8, 8);
    }
  }

  /* The final integrity register becomes the first output block. */
  memcpy(out, A, 8);
  *out_len = in_len + 8;
  return 1;
}
   1159 
   1160 static int aead_aes_key_wrap_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
   1161                                   size_t *out_len, size_t max_out_len,
   1162                                   const uint8_t *nonce, size_t nonce_len,
   1163                                   const uint8_t *in, size_t in_len,
   1164                                   const uint8_t *ad, size_t ad_len) {
   1165   const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
   1166   union {
   1167     double align;
   1168     AES_KEY ks;
   1169   } ks;
   1170   /* Variables in this function match up with the variables in the second half
   1171    * of section 2.2.1. */
   1172   unsigned i, j, n;
   1173   uint8_t A[AES_BLOCK_SIZE];
   1174 
   1175   if (ad_len != 0) {
   1176     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
   1177                       CIPHER_R_UNSUPPORTED_AD_SIZE);
   1178     return 0;
   1179   }
   1180 
   1181   if (nonce_len == 0) {
   1182     nonce = kDefaultAESKeyWrapNonce;
   1183     nonce_len = sizeof(kDefaultAESKeyWrapNonce);
   1184   }
   1185 
   1186   if (nonce_len != 8) {
   1187     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
   1188                       CIPHER_R_UNSUPPORTED_NONCE_SIZE);
   1189     return 0;
   1190   }
   1191 
   1192   if (in_len % 8 != 0) {
   1193     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
   1194                       CIPHER_R_UNSUPPORTED_INPUT_SIZE);
   1195     return 0;
   1196   }
   1197 
   1198   /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   1199    * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   1200    * conservatively cap it to 2^32-8 to stop 32-bit platforms complaining that
   1201    * a comparision is always true. */
   1202   if (in_len > 0xfffffff8) {
   1203     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open, CIPHER_R_TOO_LARGE);
   1204     return 0;
   1205   }
   1206 
   1207   if (in_len < 24) {
   1208     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
   1209     return 0;
   1210   }
   1211 
   1212   n = (in_len / 8) - 1;
   1213 
   1214   if (max_out_len < in_len - 8) {
   1215     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
   1216                       CIPHER_R_BUFFER_TOO_SMALL);
   1217     return 0;
   1218   }
   1219 
   1220   if (AES_set_decrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
   1221     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
   1222                       CIPHER_R_AES_KEY_SETUP_FAILED);
   1223     return 0;
   1224   }
   1225 
   1226   memcpy(A, in, 8);
   1227   memmove(out, in + 8, in_len - 8);
   1228 
   1229   for (j = 5; j < 6; j--) {
   1230     for (i = n; i > 0; i--) {
   1231       uint32_t t;
   1232 
   1233       t = n * j + i;
   1234       A[7] ^= t & 0xff;
   1235       A[6] ^= (t >> 8) & 0xff;
   1236       A[5] ^= (t >> 16) & 0xff;
   1237       A[4] ^= (t >> 24) & 0xff;
   1238       memcpy(A + 8, out + 8 * (i - 1), 8);
   1239       AES_decrypt(A, A, &ks.ks);
   1240       memcpy(out + 8 * (i - 1), A + 8, 8);
   1241     }
   1242   }
   1243 
   1244   if (CRYPTO_memcmp(A, nonce, 8) != 0) {
   1245     OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
   1246     return 0;
   1247   }
   1248 
   1249   *out_len = in_len - 8;
   1250   return 1;
   1251 }
   1252 
/* AEAD vtables for AES-128 and AES-256 Key Wrap. The overhead is the fixed
 * 8-byte integrity block prepended by seal. */
static const EVP_AEAD aead_aes_128_key_wrap = {
    16, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init, aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal, aead_aes_key_wrap_open,
};

static const EVP_AEAD aead_aes_256_key_wrap = {
    32, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init, aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal, aead_aes_key_wrap_open,
};

/* Public accessors for the static key-wrap AEAD vtables. */
const EVP_AEAD *EVP_aead_aes_128_key_wrap(void) { return &aead_aes_128_key_wrap; }

const EVP_AEAD *EVP_aead_aes_256_key_wrap(void) { return &aead_aes_256_key_wrap; }
   1274 
/* EVP_has_aes_hardware returns one if hardware AES (AES-NI) and carry-less
 * multiplication support are both available, and zero otherwise. Always zero
 * on non-x86 platforms. */
int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return aesni_capable() && crypto_gcm_clmul_enabled();
#else
  return 0;
#endif
}
   1282