"Fossies" - the Fresh Open Source Software Archive

Member "openssl-1.1.1g/crypto/evp/e_aes.c" (21 Apr 2020, 146254 Bytes) of package /linux/misc/openssl-1.1.1g.tar.gz:


As a special service, "Fossies" has tried to format the requested source page as HTML using (guessed) C/C++ source-code syntax highlighting (style: standard) with prefixed line numbers and a code-folding option. Alternatively, you can view or download the uninterpreted source code file here. For more information about "e_aes.c" see the Fossies "Dox" file reference documentation and the latest Fossies side-by-side "Diffs" report of code changes: 1.1.1f vs. 1.1.1g.

    1 /*
    2  * Copyright 2001-2020 The OpenSSL Project Authors. All Rights Reserved.
    3  *
    4  * Licensed under the OpenSSL license (the "License").  You may not use
    5  * this file except in compliance with the License.  You can obtain a copy
    6  * in the file LICENSE in the source distribution or at
    7  * https://www.openssl.org/source/license.html
    8  */
    9 
   10 #include <openssl/opensslconf.h>
   11 #include <openssl/crypto.h>
   12 #include <openssl/evp.h>
   13 #include <openssl/err.h>
   14 #include <string.h>
   15 #include <assert.h>
   16 #include <openssl/aes.h>
   17 #include "crypto/evp.h"
   18 #include "modes_local.h"
   19 #include <openssl/rand.h>
   20 #include "evp_local.h"
   21 
/*
 * Generic AES cipher context: one key schedule plus the block and
 * (optional) accelerated stream routines selected at key-setup time.
 */
typedef struct {
    union {
        double align;           /* enforce 64-bit alignment of the schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule */
    block128_f block;           /* single-block encrypt/decrypt routine */
    union {
        cbc128_f cbc;           /* accelerated CBC routine, or NULL */
        ctr128_f ctr;           /* accelerated CTR routine, or NULL */
    } stream;
} EVP_AES_KEY;
   33 
/* Per-context state for AES-GCM. */
typedef struct {
    union {
        double align;           /* enforce 64-bit alignment of the schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;         /* low-level GCM state */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* Tag length */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;               /* accelerated CTR routine, or NULL */
} EVP_AES_GCM_CTX;
   49 
/* Per-context state for AES-XTS: two schedules (data key and tweak key). */
typedef struct {
    union {
        double align;           /* enforce 64-bit alignment of the schedules */
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;         /* low-level XTS state */
    /* Optional accelerated XTS routine covering a whole request, or NULL */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
   61 
/* Per-context state for AES-CCM. */
typedef struct {
    union {
        double align;           /* enforce 64-bit alignment of the schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;         /* low-level CCM state */
    ccm128_f str;               /* accelerated CCM routine, or NULL */
} EVP_AES_CCM_CTX;
   76 
#ifndef OPENSSL_NO_OCB
/* Per-context state for AES-OCB (needs both directions' schedules). */
typedef struct {
    union {
        double align;           /* enforce 64-bit alignment of the schedule */
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;           /* enforce 64-bit alignment of the schedule */
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;         /* low-level OCB state */
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];      /* authentication tag buffer */
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;           /* bytes currently held in data_buf */
    int aad_buf_len;            /* bytes currently held in aad_buf */
    int ivlen;                  /* IV length */
    int taglen;                 /* tag length */
} EVP_AES_OCB_CTX;
#endif
  100 
  101 #define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
  102 
  103 #ifdef VPAES_ASM
  104 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
  105                           AES_KEY *key);
  106 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
  107                           AES_KEY *key);
  108 
  109 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
  110                    const AES_KEY *key);
  111 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
  112                    const AES_KEY *key);
  113 
  114 void vpaes_cbc_encrypt(const unsigned char *in,
  115                        unsigned char *out,
  116                        size_t length,
  117                        const AES_KEY *key, unsigned char *ivec, int enc);
  118 #endif
  119 #ifdef BSAES_ASM
  120 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
  121                        size_t length, const AES_KEY *key,
  122                        unsigned char ivec[16], int enc);
  123 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
  124                                 size_t len, const AES_KEY *key,
  125                                 const unsigned char ivec[16]);
  126 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
  127                        size_t len, const AES_KEY *key1,
  128                        const AES_KEY *key2, const unsigned char iv[16]);
  129 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
  130                        size_t len, const AES_KEY *key1,
  131                        const AES_KEY *key2, const unsigned char iv[16]);
  132 #endif
  133 #if !defined(AES_ASM) && !defined(AES_CTR_ASM)          \
  134     && defined(OPENSSL_AES_CONST_TIME)          \
  135     && !defined(OPENSSL_SMALL_FOOTPRINT)
  136 # define AES_CTR_ASM
  137 #endif
  138 #ifdef AES_CTR_ASM
  139 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
  140                        size_t blocks, const AES_KEY *key,
  141                        const unsigned char ivec[AES_BLOCK_SIZE]);
  142 #endif
  143 #ifdef AES_XTS_ASM
  144 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
  145                      const AES_KEY *key1, const AES_KEY *key2,
  146                      const unsigned char iv[16]);
  147 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
  148                      const AES_KEY *key1, const AES_KEY *key2,
  149                      const unsigned char iv[16]);
  150 #endif
  151 
  152 /* increment counter (64-bit int) by 1 */
  153 static void ctr64_inc(unsigned char *counter)
  154 {
  155     int n = 8;
  156     unsigned char c;
  157 
  158     do {
  159         --n;
  160         c = counter[n];
  161         ++c;
  162         counter[n] = c;
  163         if (c)
  164             return;
  165     } while (n);
  166 }
  167 
  168 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
  169 # include "ppc_arch.h"
  170 # ifdef VPAES_ASM
  171 #  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
  172 # endif
  173 # define HWAES_CAPABLE  (OPENSSL_ppccap_P & PPC_CRYPTO207)
  174 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
  175 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
  176 # define HWAES_encrypt aes_p8_encrypt
  177 # define HWAES_decrypt aes_p8_decrypt
  178 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
  179 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
  180 # define HWAES_xts_encrypt aes_p8_xts_encrypt
  181 # define HWAES_xts_decrypt aes_p8_xts_decrypt
  182 #endif
  183 
  184 #if     defined(OPENSSL_CPUID_OBJ) &&                   (  \
  185         ((defined(__i386)       || defined(__i386__)    || \
  186           defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
  187         defined(__x86_64)       || defined(__x86_64__)  || \
  188         defined(_M_AMD64)       || defined(_M_X64)      )
  189 
  190 extern unsigned int OPENSSL_ia32cap_P[];
  191 
  192 # ifdef VPAES_ASM
  193 #  define VPAES_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
  194 # endif
  195 # ifdef BSAES_ASM
  196 #  define BSAES_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
  197 # endif
  198 /*
  199  * AES-NI section
  200  */
  201 # define AESNI_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
  202 
  203 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
  204                           AES_KEY *key);
  205 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
  206                           AES_KEY *key);
  207 
  208 void aesni_encrypt(const unsigned char *in, unsigned char *out,
  209                    const AES_KEY *key);
  210 void aesni_decrypt(const unsigned char *in, unsigned char *out,
  211                    const AES_KEY *key);
  212 
  213 void aesni_ecb_encrypt(const unsigned char *in,
  214                        unsigned char *out,
  215                        size_t length, const AES_KEY *key, int enc);
  216 void aesni_cbc_encrypt(const unsigned char *in,
  217                        unsigned char *out,
  218                        size_t length,
  219                        const AES_KEY *key, unsigned char *ivec, int enc);
  220 
  221 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
  222                                 unsigned char *out,
  223                                 size_t blocks,
  224                                 const void *key, const unsigned char *ivec);
  225 
  226 void aesni_xts_encrypt(const unsigned char *in,
  227                        unsigned char *out,
  228                        size_t length,
  229                        const AES_KEY *key1, const AES_KEY *key2,
  230                        const unsigned char iv[16]);
  231 
  232 void aesni_xts_decrypt(const unsigned char *in,
  233                        unsigned char *out,
  234                        size_t length,
  235                        const AES_KEY *key1, const AES_KEY *key2,
  236                        const unsigned char iv[16]);
  237 
  238 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
  239                                 unsigned char *out,
  240                                 size_t blocks,
  241                                 const void *key,
  242                                 const unsigned char ivec[16],
  243                                 unsigned char cmac[16]);
  244 
  245 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
  246                                 unsigned char *out,
  247                                 size_t blocks,
  248                                 const void *key,
  249                                 const unsigned char ivec[16],
  250                                 unsigned char cmac[16]);
  251 
  252 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
  253 size_t aesni_gcm_encrypt(const unsigned char *in,
  254                          unsigned char *out,
  255                          size_t len,
  256                          const void *key, unsigned char ivec[16], u64 *Xi);
  257 #  define AES_gcm_encrypt aesni_gcm_encrypt
  258 size_t aesni_gcm_decrypt(const unsigned char *in,
  259                          unsigned char *out,
  260                          size_t len,
  261                          const void *key, unsigned char ivec[16], u64 *Xi);
  262 #  define AES_gcm_decrypt aesni_gcm_decrypt
  263 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
  264                    size_t len);
  265 #  define AES_GCM_ASM(gctx)       (gctx->ctr==aesni_ctr32_encrypt_blocks && \
  266                                  gctx->gcm.ghash==gcm_ghash_avx)
  267 #  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
  268                                  gctx->gcm.ghash==gcm_ghash_avx)
  269 #  undef AES_GCM_ASM2          /* minor size optimization */
  270 # endif
  271 
/*
 * Set up an EVP_AES_KEY for the AES-NI code path.
 *
 * For ECB/CBC decryption the inverse key schedule is generated and the
 * AES-NI decrypt routines are installed; every other mode (and all
 * encryption) uses the forward schedule, with the stream routine chosen
 * by mode: CBC, CTR, or none.
 *
 * Returns 1 on success, 0 if key schedule setup failed.
 */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        /* ECB/CBC decryption needs the inverse key schedule */
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        /* Forward schedule for encryption and all stream-like modes */
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
  305 
  306 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  307                             const unsigned char *in, size_t len)
  308 {
  309     aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
  310                       EVP_CIPHER_CTX_iv_noconst(ctx),
  311                       EVP_CIPHER_CTX_encrypting(ctx));
  312 
  313     return 1;
  314 }
  315 
  316 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  317                             const unsigned char *in, size_t len)
  318 {
  319     size_t bl = EVP_CIPHER_CTX_block_size(ctx);
  320 
  321     if (len < bl)
  322         return 1;
  323 
  324     aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
  325                       EVP_CIPHER_CTX_encrypting(ctx));
  326 
  327     return 1;
  328 }
  329 
  330 # define aesni_ofb_cipher aes_ofb_cipher
  331 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  332                             const unsigned char *in, size_t len);
  333 
  334 # define aesni_cfb_cipher aes_cfb_cipher
  335 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  336                             const unsigned char *in, size_t len);
  337 
  338 # define aesni_cfb8_cipher aes_cfb8_cipher
  339 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  340                              const unsigned char *in, size_t len);
  341 
  342 # define aesni_cfb1_cipher aes_cfb1_cipher
  343 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  344                              const unsigned char *in, size_t len);
  345 
  346 # define aesni_ctr_cipher aes_ctr_cipher
  347 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  348                             const unsigned char *in, size_t len);
  349 
/*
 * Initialise GCM state for the AES-NI path.  Either |key|, |iv| or both
 * may be supplied; a NULL/NULL call is a no-op.  A key sets up a fresh
 * schedule, GCM context and CTR routine, and applies the given IV or a
 * previously saved one.  An IV supplied without a key is applied
 * directly if the key is already set, otherwise saved for later.
 * Always returns 1.
 */
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
  382 
  383 # define aesni_gcm_cipher aes_gcm_cipher
  384 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  385                             const unsigned char *in, size_t len);
  386 
/*
 * Initialise XTS state for the AES-NI path.  The supplied key is really
 * two half-length AES keys: the first half keys the data (block1)
 * schedule, the second half keys the tweak (block2) schedule, which is
 * always an encrypt schedule.  On encryption, identical halves are
 * rejected with EVP_R_XTS_DUPLICATED_KEYS.
 * Returns 1 on success, 0 on duplicated key halves.
 */
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        /* Second half of the key drives the tweak: always encrypt */
        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
  438 
  439 # define aesni_xts_cipher aes_xts_cipher
  440 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  441                             const unsigned char *in, size_t len);
  442 
/*
 * Initialise CCM state for the AES-NI path.  A key sets up the key
 * schedule, the low-level CCM context (with the current M and L
 * parameters) and the direction-specific ccm64 stream routine.  An IV
 * is copied into the context; its length is 15 - L octets (RFC 3610).
 * Always returns 1.
 */
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        /* nonce length is 15 - L octets */
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
  464 
  465 # define aesni_ccm_cipher aes_ccm_cipher
  466 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  467                             const unsigned char *in, size_t len);
  468 
  469 # ifndef OPENSSL_NO_OCB
  470 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
  471                        size_t blocks, const void *key,
  472                        size_t start_block_num,
  473                        unsigned char offset_i[16],
  474                        const unsigned char L_[][16],
  475                        unsigned char checksum[16]);
  476 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
  477                        size_t blocks, const void *key,
  478                        size_t start_block_num,
  479                        unsigned char offset_i[16],
  480                        const unsigned char L_[][16],
  481                        unsigned char checksum[16]);
  482 
/*
 * Initialise OCB state for the AES-NI path.  Both the encrypt and the
 * decrypt key schedules are produced, since OCB decryption requires
 * both.  An IV supplied with (or saved before) the key is applied via
 * CRYPTO_ocb128_setiv; an IV supplied before any key is stored in
 * octx->iv for later.  Returns 1 on success, 0 on failure.
 */
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
  532 
  533 #  define aesni_ocb_cipher aes_ocb_cipher
  534 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  535                             const unsigned char *in, size_t len);
  536 # endif                        /* OPENSSL_NO_OCB */
  537 
/*
 * BLOCK_CIPHER_generic: for one key length and (non-custom) mode this
 * defines two EVP_CIPHER tables -- an AES-NI flavour and a plain
 * software flavour -- plus the public EVP_aes_<keylen>_<mode>()
 * accessor, which picks between them at run time via AESNI_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
  558 
/*
 * BLOCK_CIPHER_custom: like BLOCK_CIPHER_generic, but for modes with a
 * custom cipher context (e.g. GCM/XTS/CCM/OCB): mode-specific init_key,
 * cipher, cleanup and ctrl entries are wired in, and the declared key
 * length is doubled for XTS because its key is two AES keys.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
  580 
  581 #elif   defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
  582 
  583 # include "sparc_arch.h"
  584 
  585 extern unsigned int OPENSSL_sparcv9cap_P[];
  586 
  587 /*
  588  * Initial Fujitsu SPARC64 X support
  589  */
  590 # define HWAES_CAPABLE           (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
  591 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
  592 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
  593 # define HWAES_encrypt aes_fx_encrypt
  594 # define HWAES_decrypt aes_fx_decrypt
  595 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
  596 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
  597 
  598 # define SPARC_AES_CAPABLE       (OPENSSL_sparcv9cap_P[1] & CFR_AES)
  599 
  600 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
  601 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
  602 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
  603                     const AES_KEY *key);
  604 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
  605                     const AES_KEY *key);
  606 /*
  607  * Key-length specific subroutines were chosen for following reason.
  608  * Each SPARC T4 core can execute up to 8 threads which share core's
  609  * resources. Loading as much key material to registers allows to
  610  * minimize references to shared memory interface, as well as amount
  611  * of instructions in inner loops [much needed on T4]. But then having
  612  * non-key-length specific routines would require conditional branches
  613  * either in inner loops or on subroutines' entries. Former is hardly
  614  * acceptable, while latter means code size increase to size occupied
  615  * by multiple key-length specific subroutines, so why fight?
  616  */
  617 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
  618                            size_t len, const AES_KEY *key,
  619                            unsigned char *ivec);
  620 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
  621                            size_t len, const AES_KEY *key,
  622                            unsigned char *ivec);
  623 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
  624                            size_t len, const AES_KEY *key,
  625                            unsigned char *ivec);
  626 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
  627                            size_t len, const AES_KEY *key,
  628                            unsigned char *ivec);
  629 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
  630                            size_t len, const AES_KEY *key,
  631                            unsigned char *ivec);
  632 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
  633                            size_t len, const AES_KEY *key,
  634                            unsigned char *ivec);
  635 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
  636                              size_t blocks, const AES_KEY *key,
  637                              unsigned char *ivec);
  638 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
  639                              size_t blocks, const AES_KEY *key,
  640                              unsigned char *ivec);
  641 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
  642                              size_t blocks, const AES_KEY *key,
  643                              unsigned char *ivec);
  644 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
  645                            size_t blocks, const AES_KEY *key1,
  646                            const AES_KEY *key2, const unsigned char *ivec);
  647 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
  648                            size_t blocks, const AES_KEY *key1,
  649                            const AES_KEY *key2, const unsigned char *ivec);
  650 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
  651                            size_t blocks, const AES_KEY *key1,
  652                            const AES_KEY *key2, const unsigned char *ivec);
  653 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
  654                            size_t blocks, const AES_KEY *key1,
  655                            const AES_KEY *key2, const unsigned char *ivec);
  656 
/*
 * Set up an EVP_AES_KEY for the SPARC T4 code path.  Key-length
 * specific assembler routines are selected: a decrypt schedule and the
 * matching CBC-decrypt routine for ECB/CBC decryption, otherwise an
 * encrypt schedule with the CBC- or CTR-encrypt routine chosen by mode.
 * Returns 1 on success, 0 if the key length is unsupported.
 */
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        /* ECB/CBC decryption needs the inverse key schedule */
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;           /* unsupported key length */
        }
    } else {
        /* Forward schedule for encryption and all stream-like modes */
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;           /* unsupported key length */
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
  727 
  728 # define aes_t4_cbc_cipher aes_cbc_cipher
  729 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  730                              const unsigned char *in, size_t len);
  731 
  732 # define aes_t4_ecb_cipher aes_ecb_cipher
  733 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  734                              const unsigned char *in, size_t len);
  735 
  736 # define aes_t4_ofb_cipher aes_ofb_cipher
  737 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  738                              const unsigned char *in, size_t len);
  739 
  740 # define aes_t4_cfb_cipher aes_cfb_cipher
  741 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  742                              const unsigned char *in, size_t len);
  743 
  744 # define aes_t4_cfb8_cipher aes_cfb8_cipher
  745 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  746                               const unsigned char *in, size_t len);
  747 
  748 # define aes_t4_cfb1_cipher aes_cfb1_cipher
  749 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  750                               const unsigned char *in, size_t len);
  751 
  752 # define aes_t4_ctr_cipher aes_ctr_cipher
  753 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  754                              const unsigned char *in, size_t len);
  755 
/*
 * Key/IV setup for SPARC T4 hardware AES-GCM.
 *
 * key and iv may each independently be NULL; whichever is supplied is
 * installed. If a key arrives without an IV, a previously saved IV is
 * re-applied; if an IV arrives before any key, it is stashed in gctx->iv
 * until the key is set.
 *
 * Returns 1 on success, 0 on an unsupported key length.
 */
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        /* GCM's CTR core always encrypts, so only the encrypt schedule is set. */
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;           /* unsupported key length */
        }
        /*
         * If we have an IV we can set it directly, otherwise use the
         * previously saved IV (if any).
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is already set apply the IV, otherwise save it for later */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
  801 
  802 # define aes_t4_gcm_cipher aes_gcm_cipher
  803 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  804                              const unsigned char *in, size_t len);
  805 
  806 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  807                                const unsigned char *iv, int enc)
  808 {
  809     EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
  810 
  811     if (!iv && !key)
  812         return 1;
  813 
  814     if (key) {
  815         /* The key is two half length keys in reality */
  816         const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
  817         const int bits = bytes * 8;
  818 
  819         /*
  820          * Verify that the two keys are different.
  821          * 
  822          * This addresses Rogaway's vulnerability.
  823          * See comment in aes_xts_init_key() below.
  824          */
  825         if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
  826             EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
  827             return 0;
  828         }
  829 
  830         xctx->stream = NULL;
  831         /* key_len is two AES keys */
  832         if (enc) {
  833             aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
  834             xctx->xts.block1 = (block128_f) aes_t4_encrypt;
  835             switch (bits) {
  836             case 128:
  837                 xctx->stream = aes128_t4_xts_encrypt;
  838                 break;
  839             case 256:
  840                 xctx->stream = aes256_t4_xts_encrypt;
  841                 break;
  842             default:
  843                 return 0;
  844             }
  845         } else {
  846             aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
  847                                    &xctx->ks1.ks);
  848             xctx->xts.block1 = (block128_f) aes_t4_decrypt;
  849             switch (bits) {
  850             case 128:
  851                 xctx->stream = aes128_t4_xts_decrypt;
  852                 break;
  853             case 256:
  854                 xctx->stream = aes256_t4_xts_decrypt;
  855                 break;
  856             default:
  857                 return 0;
  858             }
  859         }
  860 
  861         aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
  862                                EVP_CIPHER_CTX_key_length(ctx) * 4,
  863                                &xctx->ks2.ks);
  864         xctx->xts.block2 = (block128_f) aes_t4_encrypt;
  865 
  866         xctx->xts.key1 = &xctx->ks1;
  867     }
  868 
  869     if (iv) {
  870         xctx->xts.key2 = &xctx->ks2;
  871         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
  872     }
  873 
  874     return 1;
  875 }
  876 
  877 # define aes_t4_xts_cipher aes_xts_cipher
  878 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  879                              const unsigned char *in, size_t len);
  880 
  881 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  882                                const unsigned char *iv, int enc)
  883 {
  884     EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
  885     if (!iv && !key)
  886         return 1;
  887     if (key) {
  888         int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
  889         aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
  890         CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
  891                            &cctx->ks, (block128_f) aes_t4_encrypt);
  892         cctx->str = NULL;
  893         cctx->key_set = 1;
  894     }
  895     if (iv) {
  896         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
  897         cctx->iv_set = 1;
  898     }
  899     return 1;
  900 }
  901 
  902 # define aes_t4_ccm_cipher aes_ccm_cipher
  903 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  904                              const unsigned char *in, size_t len);
  905 
  906 # ifndef OPENSSL_NO_OCB
  907 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
  908                                const unsigned char *iv, int enc)
  909 {
  910     EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
  911     if (!iv && !key)
  912         return 1;
  913     if (key) {
  914         do {
  915             /*
  916              * We set both the encrypt and decrypt key here because decrypt
  917              * needs both. We could possibly optimise to remove setting the
  918              * decrypt for an encryption operation.
  919              */
  920             aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
  921                                    &octx->ksenc.ks);
  922             aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
  923                                    &octx->ksdec.ks);
  924             if (!CRYPTO_ocb128_init(&octx->ocb,
  925                                     &octx->ksenc.ks, &octx->ksdec.ks,
  926                                     (block128_f) aes_t4_encrypt,
  927                                     (block128_f) aes_t4_decrypt,
  928                                     NULL))
  929                 return 0;
  930         }
  931         while (0);
  932 
  933         /*
  934          * If we have an iv we can set it directly, otherwise use saved IV.
  935          */
  936         if (iv == NULL && octx->iv_set)
  937             iv = octx->iv;
  938         if (iv) {
  939             if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
  940                 != 1)
  941                 return 0;
  942             octx->iv_set = 1;
  943         }
  944         octx->key_set = 1;
  945     } else {
  946         /* If key set use IV, otherwise copy */
  947         if (octx->key_set)
  948             CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
  949         else
  950             memcpy(octx->iv, iv, octx->ivlen);
  951         octx->iv_set = 1;
  952     }
  953     return 1;
  954 }
  955 
  956 #  define aes_t4_ocb_cipher aes_ocb_cipher
  957 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
  958                              const unsigned char *in, size_t len);
  959 # endif                        /* OPENSSL_NO_OCB */
  960 
/*
 * Emit the EVP_CIPHERs for a "generic" AES mode: one table backed by the
 * SPARC T4 routines, one by the software implementation, and the public
 * EVP_aes_<keylen>_<mode>() accessor that selects between them at run
 * time via SPARC_AES_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
  981 
/*
 * Same as BLOCK_CIPHER_generic, but for the "custom" modes (GCM, XTS,
 * CCM, OCB) that have their own init/ctrl/cleanup handlers and context
 * structures. Note XTS reports a double-length key to EVP.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
 1003 
 1004 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
 1005 /*
 1006  * IBM S390X support
 1007  */
 1008 # include "s390x_arch.h"
 1009 
/* Cipher context for AES-ECB via the s390x KM instruction. */
typedef struct {
    union {
        double align;           /* force alignment of the parameter block */
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];        /* raw key; 16/24/32 bytes used */
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;            /* KM function code (incl. S390X_DECRYPT bit) */
} S390X_AES_ECB_CTX;
 1024 
/* Cipher context for AES-OFB via the s390x KMO instruction. */
typedef struct {
    union {
        double align;           /* force alignment of the parameter block */
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];       /* chaining value / keystream block */
            unsigned char k[32];        /* raw key; 16/24/32 bytes used */
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;            /* KMO function code */

    int res;                    /* offset of the next unused keystream byte
                                 * in cv; 0 when block-aligned */
} S390X_AES_OFB_CTX;
 1042 
/* Cipher context for AES-CFB/CFB8 via the s390x KMF instruction. */
typedef struct {
    union {
        double align;           /* force alignment of the parameter block */
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];       /* chaining value (feedback block) */
            unsigned char k[32];        /* raw key; 16/24/32 bytes used */
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;            /* KMF function code: key size, feedback
                                 * width (bits 24+) and decrypt bit */

    int res;                    /* offset into the current partial block;
                                 * 0 when block-aligned */
} S390X_AES_CFB_CTX;
 1060 
/* Cipher context for AES-GCM via the s390x KMA instruction. */
typedef struct {
    union {
        double align;           /* force alignment of the parameter block */
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;                       /* 32-bit counter value */
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;                        /* authentication tag */
            unsigned char h[16];        /* hash subkey */
            unsigned long long taadl;   /* total AAD length (accumulated in
                                         * bytes by s390x_aes_gcm_aad) */
            unsigned long long tpcl;    /* total plain-/ciphertext length */
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;                       /* initial counter block J0 */
            unsigned char k[32];        /* raw key; 16/24/32 bytes used */
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;            /* KMA function code and flag bits */
    int key_set;                /* nonzero once a key has been installed */

    unsigned char *iv;          /* temporary IV store */
    int ivlen;                  /* IV length in bytes */
    int iv_set;                 /* nonzero once an IV has been set */
    int iv_gen;                 /* IV generation state (managed by ctrl code) */

    int taglen;                 /* tag length (managed by ctrl code) */

    unsigned char ares[16];     /* partial AAD block residue */
    unsigned char mres[16];     /* partial message block residue */
    unsigned char kres[16];     /* keystream bytes for the partial block */
    int areslen;                /* bytes buffered in ares */
    int mreslen;                /* bytes buffered in mres */
    int kreslen;                /* bytes buffered in kres */

    int tls_aad_len;            /* TLS AAD length (managed by ctrl code) */
} S390X_AES_GCM_CTX;
 1108 
/* Cipher context for AES-CCM via the s390x KMAC/KM instructions. */
typedef struct {
    union {
        double align;           /* force alignment of the parameter block */
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;                  /* intermediate chaining value (CBC-MAC) */
                unsigned char k[32];    /* raw key; overlaps key.k above */
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;                    /* CCM nonce / counter block */
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;                      /* scratch block for partial data */

            unsigned long long blocks;  /* number of blocks processed */
            int l;                      /* CCM "L" parameter (length field size) */
            int m;                      /* CCM "M" parameter (tag length) */
            int tls_aad_len;            /* TLS AAD length (managed by ctrl code) */
            int iv_set;                 /* nonzero once an IV has been set */
            int tag_set;                /* nonzero once a tag is available */
            int len_set;                /* nonzero once message length is fixed */
            int key_set;                /* nonzero once a key has been installed */

            unsigned char pad[140];     /* places fc over key.k.rounds (see above) */
            unsigned int fc;            /* function code */
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;
 1160 
 1161 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */
 1162 # define S390X_AES_FC(keylen)  (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
 1163 
 1164 /* Most modes of operation need km for partial block processing. */
 1165 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] &  \
 1166                                 S390X_CAPBIT(S390X_AES_128))
 1167 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] &  \
 1168                                 S390X_CAPBIT(S390X_AES_192))
 1169 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] &  \
 1170                                 S390X_CAPBIT(S390X_AES_256))
 1171 
 1172 # define s390x_aes_init_key aes_init_key
 1173 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 1174                               const unsigned char *iv, int enc);
 1175 
 1176 # define S390X_aes_128_cbc_CAPABLE  1   /* checked by callee */
 1177 # define S390X_aes_192_cbc_CAPABLE  1
 1178 # define S390X_aes_256_cbc_CAPABLE  1
 1179 # define S390X_AES_CBC_CTX      EVP_AES_KEY
 1180 
 1181 # define s390x_aes_cbc_init_key aes_init_key
 1182 
 1183 # define s390x_aes_cbc_cipher aes_cbc_cipher
 1184 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1185                                 const unsigned char *in, size_t len);
 1186 
 1187 # define S390X_aes_128_ecb_CAPABLE  S390X_aes_128_CAPABLE
 1188 # define S390X_aes_192_ecb_CAPABLE  S390X_aes_192_CAPABLE
 1189 # define S390X_aes_256_ecb_CAPABLE  S390X_aes_256_CAPABLE
 1190 
 1191 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
 1192                                   const unsigned char *key,
 1193                                   const unsigned char *iv, int enc)
 1194 {
 1195     S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
 1196     const int keylen = EVP_CIPHER_CTX_key_length(ctx);
 1197 
 1198     cctx->fc = S390X_AES_FC(keylen);
 1199     if (!enc)
 1200         cctx->fc |= S390X_DECRYPT;
 1201 
 1202     memcpy(cctx->km.param.k, key, keylen);
 1203     return 1;
 1204 }
 1205 
 1206 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1207                                 const unsigned char *in, size_t len)
 1208 {
 1209     S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
 1210 
 1211     s390x_km(in, len, out, cctx->fc, &cctx->km.param);
 1212     return 1;
 1213 }
 1214 
 1215 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE &&        \
 1216                                     (OPENSSL_s390xcap_P.kmo[0] &    \
 1217                                      S390X_CAPBIT(S390X_AES_128)))
 1218 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE &&        \
 1219                                     (OPENSSL_s390xcap_P.kmo[0] &    \
 1220                                      S390X_CAPBIT(S390X_AES_192)))
 1221 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE &&        \
 1222                                     (OPENSSL_s390xcap_P.kmo[0] &    \
 1223                                      S390X_CAPBIT(S390X_AES_256)))
 1224 
 1225 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
 1226                                   const unsigned char *key,
 1227                                   const unsigned char *ivec, int enc)
 1228 {
 1229     S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
 1230     const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
 1231     const int keylen = EVP_CIPHER_CTX_key_length(ctx);
 1232     const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
 1233 
 1234     memcpy(cctx->kmo.param.cv, iv, ivlen);
 1235     memcpy(cctx->kmo.param.k, key, keylen);
 1236     cctx->fc = S390X_AES_FC(keylen);
 1237     cctx->res = 0;
 1238     return 1;
 1239 }
 1240 
/*-
 * Process len bytes of OFB data. Any unused keystream bytes left over from
 * a previous call are consumed first; full blocks are handled by the KMO
 * instruction; a trailing fragment generates one more keystream block via
 * KM and remembers the offset in cctx->res for the next call.
 */
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    int n = cctx->res;          /* offset into the current keystream block */
    int rem;

    /* Drain the partial keystream block from the previous call. */
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;            /* trailing bytes after the last full block */

    len &= ~(size_t)0xf;
    if (len) {
        /* KMO processes all full blocks and updates the chaining value. */
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        /* Generate one more keystream block (KM of the chaining value). */
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}
 1279 
 1280 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE &&        \
 1281                                     (OPENSSL_s390xcap_P.kmf[0] &    \
 1282                                      S390X_CAPBIT(S390X_AES_128)))
 1283 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE &&        \
 1284                                     (OPENSSL_s390xcap_P.kmf[0] &    \
 1285                                      S390X_CAPBIT(S390X_AES_192)))
 1286 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE &&        \
 1287                                     (OPENSSL_s390xcap_P.kmf[0] &    \
 1288                                      S390X_CAPBIT(S390X_AES_256)))
 1289 
 1290 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
 1291                                   const unsigned char *key,
 1292                                   const unsigned char *ivec, int enc)
 1293 {
 1294     S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
 1295     const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
 1296     const int keylen = EVP_CIPHER_CTX_key_length(ctx);
 1297     const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
 1298 
 1299     cctx->fc = S390X_AES_FC(keylen);
 1300     cctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
 1301     if (!enc)
 1302         cctx->fc |= S390X_DECRYPT;
 1303 
 1304     cctx->res = 0;
 1305     memcpy(cctx->kmf.param.cv, iv, ivlen);
 1306     memcpy(cctx->kmf.param.k, key, keylen);
 1307     return 1;
 1308 }
 1309 
/*-
 * Process len bytes of CFB128 data. Partial-block state from a previous
 * call is consumed first, full blocks go through the KMF instruction, and
 * a trailing fragment is handled by encrypting the chaining value with KM
 * and combining byte-wise.
 */
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int n = cctx->res;          /* offset into the current feedback block */
    int rem;
    unsigned char tmp;

    /* Drain the partial block left over from the previous call. */
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        /* Feedback is always ciphertext: *out when encrypting, input otherwise. */
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;            /* trailing bytes after the last full block */

    len &= ~(size_t)0xf;
    if (len) {
        /* KMF processes all full blocks and updates the chaining value. */
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        /*
         * Generate keystream for the fragment with a plain KM block-encrypt
         * of the chaining value: deliberately uses the base function code,
         * without the feedback-width and decrypt bits.
         */
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}
 1355 
 1356 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &    \
 1357                                      S390X_CAPBIT(S390X_AES_128))
 1358 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &    \
 1359                                      S390X_CAPBIT(S390X_AES_192))
 1360 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &    \
 1361                                      S390X_CAPBIT(S390X_AES_256))
 1362 
 1363 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
 1364                                    const unsigned char *key,
 1365                                    const unsigned char *ivec, int enc)
 1366 {
 1367     S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
 1368     const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
 1369     const int keylen = EVP_CIPHER_CTX_key_length(ctx);
 1370     const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
 1371 
 1372     cctx->fc = S390X_AES_FC(keylen);
 1373     cctx->fc |= 1 << 24;   /* 1 byte cipher feedback */
 1374     if (!enc)
 1375         cctx->fc |= S390X_DECRYPT;
 1376 
 1377     memcpy(cctx->kmf.param.cv, iv, ivlen);
 1378     memcpy(cctx->kmf.param.k, key, keylen);
 1379     return 1;
 1380 }
 1381 
 1382 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1383                                  const unsigned char *in, size_t len)
 1384 {
 1385     S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
 1386 
 1387     s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
 1388     return 1;
 1389 }
 1390 
 1391 # define S390X_aes_128_cfb1_CAPABLE 0
 1392 # define S390X_aes_192_cfb1_CAPABLE 0
 1393 # define S390X_aes_256_cfb1_CAPABLE 0
 1394 
 1395 # define s390x_aes_cfb1_init_key aes_init_key
 1396 
 1397 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
 1398 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1399                                  const unsigned char *in, size_t len);
 1400 
 1401 # define S390X_aes_128_ctr_CAPABLE  1   /* checked by callee */
 1402 # define S390X_aes_192_ctr_CAPABLE  1
 1403 # define S390X_aes_256_ctr_CAPABLE  1
 1404 # define S390X_AES_CTR_CTX      EVP_AES_KEY
 1405 
 1406 # define s390x_aes_ctr_init_key aes_init_key
 1407 
 1408 # define s390x_aes_ctr_cipher aes_ctr_cipher
 1409 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1410                                 const unsigned char *in, size_t len);
 1411 
 1412 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE &&        \
 1413                                     (OPENSSL_s390xcap_P.kma[0] &    \
 1414                                      S390X_CAPBIT(S390X_AES_128)))
 1415 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE &&        \
 1416                                     (OPENSSL_s390xcap_P.kma[0] &    \
 1417                                      S390X_CAPBIT(S390X_AES_192)))
 1418 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE &&        \
 1419                                     (OPENSSL_s390xcap_P.kma[0] &    \
 1420                                      S390X_CAPBIT(S390X_AES_256)))
 1421 
 1422 /* iv + padding length for iv lengths != 12 */
 1423 # define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
 1424 
 1425 /*-
 1426  * Process additional authenticated data. Returns 0 on success. Code is
 1427  * big-endian.
 1428  */
/*-
 * Process additional authenticated data. Returns 0 on success, -1 if the
 * accumulated AAD length is out of range, and -2 if AAD is supplied after
 * payload en/de-cryption has already begun. Code is big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    /* AAD must be complete before any payload is processed. */
    if (ctx->kma.param.tpcl)
        return -2;

    /* Enforce the 2^61-byte AAD limit and guard against wrap-around. */
    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    /* Top up a partial AAD block buffered from a previous call. */
    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;    /* hash subkey set up by first KMA */
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;            /* trailing bytes after the last full block */

    /* Feed all complete blocks to KMA in one go. */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    /* Stash any trailing fragment for the next call. */
    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}
 1478 
 1479 /*-
 1480  * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 1481  * success. Code is big-endian.
 1482  */
/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success, -1 if the accumulated message length is out of range. Code is
 * big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    /* Enforce the 2^36 - 32 byte message limit and guard against wrap. */
    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    /* Top up a partial message block buffered from a previous call. */
    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            /* Authenticate the completed block (plus any buffered AAD). */
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;            /* trailing bytes after the last full block */

    /* Bulk-process all complete blocks with KMA. */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            /*
             * Fresh partial block: compute its keystream with a plain KM
             * (function-code flag bits masked off) over J0 with the
             * incremented counter value.
             */
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        /* Buffer the fragment and XOR it with the cached keystream. */
        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}
 1567 
 1568 /*-
 1569  * Initialize context structure. Code is big-endian.
 1570  */
 1571 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
 1572                                 const unsigned char *iv)
 1573 {
 1574     ctx->kma.param.t.g[0] = 0;
 1575     ctx->kma.param.t.g[1] = 0;
 1576     ctx->kma.param.tpcl = 0;
 1577     ctx->kma.param.taadl = 0;
 1578     ctx->mreslen = 0;
 1579     ctx->areslen = 0;
 1580     ctx->kreslen = 0;
 1581 
 1582     if (ctx->ivlen == 12) {
 1583         memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
 1584         ctx->kma.param.j0.w[3] = 1;
 1585         ctx->kma.param.cv.w = 1;
 1586     } else {
 1587         /* ctx->iv has the right size and is already padded. */
 1588         memcpy(ctx->iv, iv, ctx->ivlen);
 1589         s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
 1590                   ctx->fc, &ctx->kma.param);
 1591         ctx->fc |= S390X_KMA_HS;
 1592 
 1593         ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
 1594         ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
 1595         ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
 1596         ctx->kma.param.t.g[0] = 0;
 1597         ctx->kma.param.t.g[1] = 0;
 1598     }
 1599 }
 1600 
 1601 /*-
 1602  * Performs various operations on the context structure depending on control
 1603  * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 1604  * Code is big-endian.
 1605  */
 1606 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
 1607 {
 1608     S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
 1609     S390X_AES_GCM_CTX *gctx_out;
 1610     EVP_CIPHER_CTX *out;
 1611     unsigned char *buf, *iv;
 1612     int ivlen, enc, len;
 1613 
 1614     switch (type) {
 1615     case EVP_CTRL_INIT:
 1616         ivlen = EVP_CIPHER_iv_length(c->cipher);
 1617         iv = EVP_CIPHER_CTX_iv_noconst(c);
 1618         gctx->key_set = 0;
 1619         gctx->iv_set = 0;
 1620         gctx->ivlen = ivlen;
 1621         gctx->iv = iv;
 1622         gctx->taglen = -1;
 1623         gctx->iv_gen = 0;
 1624         gctx->tls_aad_len = -1;
 1625         return 1;
 1626 
 1627     case EVP_CTRL_GET_IVLEN:
 1628         *(int *)ptr = gctx->ivlen;
 1629         return 1;
 1630 
 1631     case EVP_CTRL_AEAD_SET_IVLEN:
 1632         if (arg <= 0)
 1633             return 0;
 1634 
 1635         if (arg != 12) {
 1636             iv = EVP_CIPHER_CTX_iv_noconst(c);
 1637             len = S390X_gcm_ivpadlen(arg);
 1638 
 1639             /* Allocate memory for iv if needed. */
 1640             if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
 1641                 if (gctx->iv != iv)
 1642                     OPENSSL_free(gctx->iv);
 1643 
 1644                 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
 1645                     EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
 1646                     return 0;
 1647                 }
 1648             }
 1649             /* Add padding. */
 1650             memset(gctx->iv + arg, 0, len - arg - 8);
 1651             *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
 1652         }
 1653         gctx->ivlen = arg;
 1654         return 1;
 1655 
 1656     case EVP_CTRL_AEAD_SET_TAG:
 1657         buf = EVP_CIPHER_CTX_buf_noconst(c);
 1658         enc = EVP_CIPHER_CTX_encrypting(c);
 1659         if (arg <= 0 || arg > 16 || enc)
 1660             return 0;
 1661 
 1662         memcpy(buf, ptr, arg);
 1663         gctx->taglen = arg;
 1664         return 1;
 1665 
 1666     case EVP_CTRL_AEAD_GET_TAG:
 1667         enc = EVP_CIPHER_CTX_encrypting(c);
 1668         if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
 1669             return 0;
 1670 
 1671         memcpy(ptr, gctx->kma.param.t.b, arg);
 1672         return 1;
 1673 
 1674     case EVP_CTRL_GCM_SET_IV_FIXED:
 1675         /* Special case: -1 length restores whole iv */
 1676         if (arg == -1) {
 1677             memcpy(gctx->iv, ptr, gctx->ivlen);
 1678             gctx->iv_gen = 1;
 1679             return 1;
 1680         }
 1681         /*
 1682          * Fixed field must be at least 4 bytes and invocation field at least
 1683          * 8.
 1684          */
 1685         if ((arg < 4) || (gctx->ivlen - arg) < 8)
 1686             return 0;
 1687 
 1688         if (arg)
 1689             memcpy(gctx->iv, ptr, arg);
 1690 
 1691         enc = EVP_CIPHER_CTX_encrypting(c);
 1692         if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
 1693             return 0;
 1694 
 1695         gctx->iv_gen = 1;
 1696         return 1;
 1697 
 1698     case EVP_CTRL_GCM_IV_GEN:
 1699         if (gctx->iv_gen == 0 || gctx->key_set == 0)
 1700             return 0;
 1701 
 1702         s390x_aes_gcm_setiv(gctx, gctx->iv);
 1703 
 1704         if (arg <= 0 || arg > gctx->ivlen)
 1705             arg = gctx->ivlen;
 1706 
 1707         memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
 1708         /*
 1709          * Invocation field will be at least 8 bytes in size and so no need
 1710          * to check wrap around or increment more than last 8 bytes.
 1711          */
 1712         ctr64_inc(gctx->iv + gctx->ivlen - 8);
 1713         gctx->iv_set = 1;
 1714         return 1;
 1715 
 1716     case EVP_CTRL_GCM_SET_IV_INV:
 1717         enc = EVP_CIPHER_CTX_encrypting(c);
 1718         if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
 1719             return 0;
 1720 
 1721         memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
 1722         s390x_aes_gcm_setiv(gctx, gctx->iv);
 1723         gctx->iv_set = 1;
 1724         return 1;
 1725 
 1726     case EVP_CTRL_AEAD_TLS1_AAD:
 1727         /* Save the aad for later use. */
 1728         if (arg != EVP_AEAD_TLS1_AAD_LEN)
 1729             return 0;
 1730 
 1731         buf = EVP_CIPHER_CTX_buf_noconst(c);
 1732         memcpy(buf, ptr, arg);
 1733         gctx->tls_aad_len = arg;
 1734 
 1735         len = buf[arg - 2] << 8 | buf[arg - 1];
 1736         /* Correct length for explicit iv. */
 1737         if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
 1738             return 0;
 1739         len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
 1740 
 1741         /* If decrypting correct for tag too. */
 1742         enc = EVP_CIPHER_CTX_encrypting(c);
 1743         if (!enc) {
 1744             if (len < EVP_GCM_TLS_TAG_LEN)
 1745                 return 0;
 1746             len -= EVP_GCM_TLS_TAG_LEN;
 1747         }
 1748         buf[arg - 2] = len >> 8;
 1749         buf[arg - 1] = len & 0xff;
 1750         /* Extra padding: tag appended to record. */
 1751         return EVP_GCM_TLS_TAG_LEN;
 1752 
 1753     case EVP_CTRL_COPY:
 1754         out = ptr;
 1755         gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
 1756         iv = EVP_CIPHER_CTX_iv_noconst(c);
 1757 
 1758         if (gctx->iv == iv) {
 1759             gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
 1760         } else {
 1761             len = S390X_gcm_ivpadlen(gctx->ivlen);
 1762 
 1763             if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
 1764                 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
 1765                 return 0;
 1766             }
 1767 
 1768             memcpy(gctx_out->iv, gctx->iv, len);
 1769         }
 1770         return 1;
 1771 
 1772     default:
 1773         return -1;
 1774     }
 1775 }
 1776 
 1777 /*-
 1778  * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 1779  */
 1780 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
 1781                                   const unsigned char *key,
 1782                                   const unsigned char *iv, int enc)
 1783 {
 1784     S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
 1785     int keylen;
 1786 
 1787     if (iv == NULL && key == NULL)
 1788         return 1;
 1789 
 1790     if (key != NULL) {
 1791         keylen = EVP_CIPHER_CTX_key_length(ctx);
 1792         memcpy(&gctx->kma.param.k, key, keylen);
 1793 
 1794         gctx->fc = S390X_AES_FC(keylen);
 1795         if (!enc)
 1796             gctx->fc |= S390X_DECRYPT;
 1797 
 1798         if (iv == NULL && gctx->iv_set)
 1799             iv = gctx->iv;
 1800 
 1801         if (iv != NULL) {
 1802             s390x_aes_gcm_setiv(gctx, iv);
 1803             gctx->iv_set = 1;
 1804         }
 1805         gctx->key_set = 1;
 1806     } else {
 1807         if (gctx->key_set)
 1808             s390x_aes_gcm_setiv(gctx, iv);
 1809         else
 1810             memcpy(gctx->iv, iv, gctx->ivlen);
 1811 
 1812         gctx->iv_set = 1;
 1813         gctx->iv_gen = 0;
 1814     }
 1815     return 1;
 1816 }
 1817 
 1818 /*-
 1819  * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 1820  * if successful. Otherwise -1 is returned. Code is big-endian.
 1821  */
 1822 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1823                                     const unsigned char *in, size_t len)
 1824 {
 1825     S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
 1826     const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
 1827     const int enc = EVP_CIPHER_CTX_encrypting(ctx);
 1828     int rv = -1;
 1829 
 1830     if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
 1831         return -1;
 1832 
 1833     if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
 1834                                      : EVP_CTRL_GCM_SET_IV_INV,
 1835                             EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
 1836         goto err;
 1837 
 1838     in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
 1839     out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
 1840     len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
 1841 
 1842     gctx->kma.param.taadl = gctx->tls_aad_len << 3;
 1843     gctx->kma.param.tpcl = len << 3;
 1844     s390x_kma(buf, gctx->tls_aad_len, in, len, out,
 1845               gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
 1846 
 1847     if (enc) {
 1848         memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
 1849         rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
 1850     } else {
 1851         if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
 1852                           EVP_GCM_TLS_TAG_LEN)) {
 1853             OPENSSL_cleanse(out, len);
 1854             goto err;
 1855         }
 1856         rv = len;
 1857     }
 1858 err:
 1859     gctx->iv_set = 0;
 1860     gctx->tls_aad_len = -1;
 1861     return rv;
 1862 }
 1863 
 1864 /*-
 1865  * Called from EVP layer to initialize context, process additional
 1866  * authenticated data, en/de-crypt plain/cipher-text and authenticate
 1867  * ciphertext or process a TLS packet, depending on context. Returns bytes
 1868  * written on success. Otherwise -1 is returned. Code is big-endian.
 1869  */
 1870 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1871                                 const unsigned char *in, size_t len)
 1872 {
 1873     S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
 1874     unsigned char *buf, tmp[16];
 1875     int enc;
 1876 
 1877     if (!gctx->key_set)
 1878         return -1;
 1879 
 1880     if (gctx->tls_aad_len >= 0)
 1881         return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
 1882 
 1883     if (!gctx->iv_set)
 1884         return -1;
 1885 
 1886     if (in != NULL) {
 1887         if (out == NULL) {
 1888             if (s390x_aes_gcm_aad(gctx, in, len))
 1889                 return -1;
 1890         } else {
 1891             if (s390x_aes_gcm(gctx, in, out, len))
 1892                 return -1;
 1893         }
 1894         return len;
 1895     } else {
 1896         gctx->kma.param.taadl <<= 3;
 1897         gctx->kma.param.tpcl <<= 3;
 1898         s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
 1899                   gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
 1900         /* recall that we already did en-/decrypt gctx->mres
 1901          * and returned it to caller... */
 1902         OPENSSL_cleanse(tmp, gctx->mreslen);
 1903         gctx->iv_set = 0;
 1904 
 1905         enc = EVP_CIPHER_CTX_encrypting(ctx);
 1906         if (enc) {
 1907             gctx->taglen = 16;
 1908         } else {
 1909             if (gctx->taglen < 0)
 1910                 return -1;
 1911 
 1912             buf = EVP_CIPHER_CTX_buf_noconst(ctx);
 1913             if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
 1914                 return -1;
 1915         }
 1916         return 0;
 1917     }
 1918 }
 1919 
 1920 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
 1921 {
 1922     S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
 1923     const unsigned char *iv;
 1924 
 1925     if (gctx == NULL)
 1926         return 0;
 1927 
 1928     iv = EVP_CIPHER_CTX_iv(c);
 1929     if (iv != gctx->iv)
 1930         OPENSSL_free(gctx->iv);
 1931 
 1932     OPENSSL_cleanse(gctx, sizeof(*gctx));
 1933     return 1;
 1934 }
 1935 
 1936 # define S390X_AES_XTS_CTX      EVP_AES_XTS_CTX
 1937 # define S390X_aes_128_xts_CAPABLE  1   /* checked by callee */
 1938 # define S390X_aes_256_xts_CAPABLE  1
 1939 
 1940 # define s390x_aes_xts_init_key aes_xts_init_key
 1941 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
 1942                                   const unsigned char *key,
 1943                                   const unsigned char *iv, int enc);
 1944 # define s390x_aes_xts_cipher aes_xts_cipher
 1945 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 1946                                 const unsigned char *in, size_t len);
 1947 # define s390x_aes_xts_ctrl aes_xts_ctrl
 1948 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
 1949 # define s390x_aes_xts_cleanup aes_xts_cleanup
 1950 
 1951 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&        \
 1952                                     (OPENSSL_s390xcap_P.kmac[0] &   \
 1953                                      S390X_CAPBIT(S390X_AES_128)))
 1954 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&        \
 1955                                     (OPENSSL_s390xcap_P.kmac[0] &   \
 1956                                      S390X_CAPBIT(S390X_AES_192)))
 1957 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&        \
 1958                                     (OPENSSL_s390xcap_P.kmac[0] &   \
 1959                                      S390X_CAPBIT(S390X_AES_256)))
 1960 
 1961 # define S390X_CCM_AAD_FLAG 0x40
 1962 
 1963 /*-
 1964  * Set nonce and length fields. Code is big-endian.
 1965  */
 1966 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
 1967                                           const unsigned char *nonce,
 1968                                           size_t mlen)
 1969 {
 1970     ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
 1971     ctx->aes.ccm.nonce.g[1] = mlen;
 1972     memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
 1973 }
 1974 
 1975 /*-
 1976  * Process additional authenticated data. Code is big-endian.
 1977  */
 1978 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
 1979                               size_t alen)
 1980 {
 1981     unsigned char *ptr;
 1982     int i, rem;
 1983 
 1984     if (!alen)
 1985         return;
 1986 
 1987     ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
 1988 
 1989     /* Suppress 'type-punned pointer dereference' warning. */
 1990     ptr = ctx->aes.ccm.buf.b;
 1991 
 1992     if (alen < ((1 << 16) - (1 << 8))) {
 1993         *(uint16_t *)ptr = alen;
 1994         i = 2;
 1995     } else if (sizeof(alen) == 8
 1996                && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
 1997         *(uint16_t *)ptr = 0xffff;
 1998         *(uint64_t *)(ptr + 2) = alen;
 1999         i = 10;
 2000     } else {
 2001         *(uint16_t *)ptr = 0xfffe;
 2002         *(uint32_t *)(ptr + 2) = alen;
 2003         i = 6;
 2004     }
 2005 
 2006     while (i < 16 && alen) {
 2007         ctx->aes.ccm.buf.b[i] = *aad;
 2008         ++aad;
 2009         --alen;
 2010         ++i;
 2011     }
 2012     while (i < 16) {
 2013         ctx->aes.ccm.buf.b[i] = 0;
 2014         ++i;
 2015     }
 2016 
 2017     ctx->aes.ccm.kmac_param.icv.g[0] = 0;
 2018     ctx->aes.ccm.kmac_param.icv.g[1] = 0;
 2019     s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
 2020                &ctx->aes.ccm.kmac_param);
 2021     ctx->aes.ccm.blocks += 2;
 2022 
 2023     rem = alen & 0xf;
 2024     alen &= ~(size_t)0xf;
 2025     if (alen) {
 2026         s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
 2027         ctx->aes.ccm.blocks += alen >> 4;
 2028         aad += alen;
 2029     }
 2030     if (rem) {
 2031         for (i = 0; i < rem; i++)
 2032             ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
 2033 
 2034         s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
 2035                  ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
 2036                  ctx->aes.ccm.kmac_param.k);
 2037         ctx->aes.ccm.blocks++;
 2038     }
 2039 }
 2040 
 2041 /*-
 2042  * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 2043  * success.
 2044  */
 2045 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
 2046                          unsigned char *out, size_t len, int enc)
 2047 {
 2048     size_t n, rem;
 2049     unsigned int i, l, num;
 2050     unsigned char flags;
 2051 
 2052     flags = ctx->aes.ccm.nonce.b[0];
 2053     if (!(flags & S390X_CCM_AAD_FLAG)) {
 2054         s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
 2055                  ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
 2056         ctx->aes.ccm.blocks++;
 2057     }
 2058     l = flags & 0x7;
 2059     ctx->aes.ccm.nonce.b[0] = l;
 2060 
 2061     /*-
 2062      * Reconstruct length from encoded length field
 2063      * and initialize it with counter value.
 2064      */
 2065     n = 0;
 2066     for (i = 15 - l; i < 15; i++) {
 2067         n |= ctx->aes.ccm.nonce.b[i];
 2068         ctx->aes.ccm.nonce.b[i] = 0;
 2069         n <<= 8;
 2070     }
 2071     n |= ctx->aes.ccm.nonce.b[15];
 2072     ctx->aes.ccm.nonce.b[15] = 1;
 2073 
 2074     if (n != len)
 2075         return -1;      /* length mismatch */
 2076 
 2077     if (enc) {
 2078         /* Two operations per block plus one for tag encryption */
 2079         ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
 2080         if (ctx->aes.ccm.blocks > (1ULL << 61))
 2081             return -2;      /* too much data */
 2082     }
 2083 
 2084     num = 0;
 2085     rem = len & 0xf;
 2086     len &= ~(size_t)0xf;
 2087 
 2088     if (enc) {
 2089         /* mac-then-encrypt */
 2090         if (len)
 2091             s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
 2092         if (rem) {
 2093             for (i = 0; i < rem; i++)
 2094                 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
 2095 
 2096             s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
 2097                      ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
 2098                      ctx->aes.ccm.kmac_param.k);
 2099         }
 2100 
 2101         CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
 2102                                     ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
 2103                                     &num, (ctr128_f)AES_ctr32_encrypt);
 2104     } else {
 2105         /* decrypt-then-mac */
 2106         CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
 2107                                     ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
 2108                                     &num, (ctr128_f)AES_ctr32_encrypt);
 2109 
 2110         if (len)
 2111             s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
 2112         if (rem) {
 2113             for (i = 0; i < rem; i++)
 2114                 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
 2115 
 2116             s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
 2117                      ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
 2118                      ctx->aes.ccm.kmac_param.k);
 2119         }
 2120     }
 2121     /* encrypt tag */
 2122     for (i = 15 - l; i < 16; i++)
 2123         ctx->aes.ccm.nonce.b[i] = 0;
 2124 
 2125     s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
 2126              ctx->aes.ccm.kmac_param.k);
 2127     ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
 2128     ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
 2129 
 2130     ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
 2131     return 0;
 2132 }
 2133 
 2134 /*-
 2135  * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 2136  * if successful. Otherwise -1 is returned.
 2137  */
 2138 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2139                                     const unsigned char *in, size_t len)
 2140 {
 2141     S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
 2142     unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
 2143     unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
 2144     const int enc = EVP_CIPHER_CTX_encrypting(ctx);
 2145 
 2146     if (out != in
 2147             || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
 2148         return -1;
 2149 
 2150     if (enc) {
 2151         /* Set explicit iv (sequence number). */
 2152         memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
 2153     }
 2154 
 2155     len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
 2156     /*-
 2157      * Get explicit iv (sequence number). We already have fixed iv
 2158      * (server/client_write_iv) here.
 2159      */
 2160     memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
 2161     s390x_aes_ccm_setiv(cctx, ivec, len);
 2162 
 2163     /* Process aad (sequence number|type|version|length) */
 2164     s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
 2165 
 2166     in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
 2167     out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
 2168 
 2169     if (enc) {
 2170         if (s390x_aes_ccm(cctx, in, out, len, enc))
 2171             return -1;
 2172 
 2173         memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
 2174         return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
 2175     } else {
 2176         if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
 2177             if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
 2178                                cctx->aes.ccm.m))
 2179                 return len;
 2180         }
 2181 
 2182         OPENSSL_cleanse(out, len);
 2183         return -1;
 2184     }
 2185 }
 2186 
 2187 /*-
 2188  * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 2189  * returned.
 2190  */
 2191 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
 2192                                   const unsigned char *key,
 2193                                   const unsigned char *iv, int enc)
 2194 {
 2195     S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
 2196     unsigned char *ivec;
 2197     int keylen;
 2198 
 2199     if (iv == NULL && key == NULL)
 2200         return 1;
 2201 
 2202     if (key != NULL) {
 2203         keylen = EVP_CIPHER_CTX_key_length(ctx);
 2204         cctx->aes.ccm.fc = S390X_AES_FC(keylen);
 2205         memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
 2206 
 2207         /* Store encoded m and l. */
 2208         cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
 2209                                  | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
 2210         memset(cctx->aes.ccm.nonce.b + 1, 0,
 2211                sizeof(cctx->aes.ccm.nonce.b));
 2212         cctx->aes.ccm.blocks = 0;
 2213 
 2214         cctx->aes.ccm.key_set = 1;
 2215     }
 2216 
 2217     if (iv != NULL) {
 2218         ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
 2219         memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
 2220 
 2221         cctx->aes.ccm.iv_set = 1;
 2222     }
 2223 
 2224     return 1;
 2225 }
 2226 
 2227 /*-
 2228  * Called from EVP layer to initialize context, process additional
 2229  * authenticated data, en/de-crypt plain/cipher-text and authenticate
 2230  * plaintext or process a TLS packet, depending on context. Returns bytes
 2231  * written on success. Otherwise -1 is returned.
 2232  */
 2233 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2234                                 const unsigned char *in, size_t len)
 2235 {
 2236     S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
 2237     const int enc = EVP_CIPHER_CTX_encrypting(ctx);
 2238     int rv;
 2239     unsigned char *buf, *ivec;
 2240 
 2241     if (!cctx->aes.ccm.key_set)
 2242         return -1;
 2243 
 2244     if (cctx->aes.ccm.tls_aad_len >= 0)
 2245         return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
 2246 
 2247     /*-
 2248      * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
 2249      * so integrity must be checked already at Update() i.e., before
 2250      * potentially corrupted data is output.
 2251      */
 2252     if (in == NULL && out != NULL)
 2253         return 0;
 2254 
 2255     if (!cctx->aes.ccm.iv_set)
 2256         return -1;
 2257 
 2258     if (out == NULL) {
 2259         /* Update(): Pass message length. */
 2260         if (in == NULL) {
 2261             ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
 2262             s390x_aes_ccm_setiv(cctx, ivec, len);
 2263 
 2264             cctx->aes.ccm.len_set = 1;
 2265             return len;
 2266         }
 2267 
 2268         /* Update(): Process aad. */
 2269         if (!cctx->aes.ccm.len_set && len)
 2270             return -1;
 2271 
 2272         s390x_aes_ccm_aad(cctx, in, len);
 2273         return len;
 2274     }
 2275 
 2276     /* The tag must be set before actually decrypting data */
 2277     if (!enc && !cctx->aes.ccm.tag_set)
 2278         return -1;
 2279 
 2280     /* Update(): Process message. */
 2281 
 2282     if (!cctx->aes.ccm.len_set) {
 2283         /*-
 2284          * In case message length was not previously set explicitly via
 2285          * Update(), set it now.
 2286          */
 2287         ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
 2288         s390x_aes_ccm_setiv(cctx, ivec, len);
 2289 
 2290         cctx->aes.ccm.len_set = 1;
 2291     }
 2292 
 2293     if (enc) {
 2294         if (s390x_aes_ccm(cctx, in, out, len, enc))
 2295             return -1;
 2296 
 2297         cctx->aes.ccm.tag_set = 1;
 2298         return len;
 2299     } else {
 2300         rv = -1;
 2301 
 2302         if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
 2303             buf = EVP_CIPHER_CTX_buf_noconst(ctx);
 2304             if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
 2305                                cctx->aes.ccm.m))
 2306                 rv = len;
 2307         }
 2308 
 2309         if (rv == -1)
 2310             OPENSSL_cleanse(out, len);
 2311 
 2312         cctx->aes.ccm.iv_set = 0;
 2313         cctx->aes.ccm.tag_set = 0;
 2314         cctx->aes.ccm.len_set = 0;
 2315         return rv;
 2316     }
 2317 }
 2318 
 2319 /*-
 2320  * Performs various operations on the context structure depending on control
 2321  * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 2322  * Code is big-endian.
 2323  */
 2324 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
 2325 {
 2326     S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
 2327     unsigned char *buf, *iv;
 2328     int enc, len;
 2329 
 2330     switch (type) {
 2331     case EVP_CTRL_INIT:
 2332         cctx->aes.ccm.key_set = 0;
 2333         cctx->aes.ccm.iv_set = 0;
 2334         cctx->aes.ccm.l = 8;
 2335         cctx->aes.ccm.m = 12;
 2336         cctx->aes.ccm.tag_set = 0;
 2337         cctx->aes.ccm.len_set = 0;
 2338         cctx->aes.ccm.tls_aad_len = -1;
 2339         return 1;
 2340 
 2341     case EVP_CTRL_GET_IVLEN:
 2342         *(int *)ptr = 15 - cctx->aes.ccm.l;
 2343         return 1;
 2344 
 2345     case EVP_CTRL_AEAD_TLS1_AAD:
 2346         if (arg != EVP_AEAD_TLS1_AAD_LEN)
 2347             return 0;
 2348 
 2349         /* Save the aad for later use. */
 2350         buf = EVP_CIPHER_CTX_buf_noconst(c);
 2351         memcpy(buf, ptr, arg);
 2352         cctx->aes.ccm.tls_aad_len = arg;
 2353 
 2354         len = buf[arg - 2] << 8 | buf[arg - 1];
 2355         if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
 2356             return 0;
 2357 
 2358         /* Correct length for explicit iv. */
 2359         len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
 2360 
 2361         enc = EVP_CIPHER_CTX_encrypting(c);
 2362         if (!enc) {
 2363             if (len < cctx->aes.ccm.m)
 2364                 return 0;
 2365 
 2366             /* Correct length for tag. */
 2367             len -= cctx->aes.ccm.m;
 2368         }
 2369 
 2370         buf[arg - 2] = len >> 8;
 2371         buf[arg - 1] = len & 0xff;
 2372 
 2373         /* Extra padding: tag appended to record. */
 2374         return cctx->aes.ccm.m;
 2375 
 2376     case EVP_CTRL_CCM_SET_IV_FIXED:
 2377         if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
 2378             return 0;
 2379 
 2380         /* Copy to first part of the iv. */
 2381         iv = EVP_CIPHER_CTX_iv_noconst(c);
 2382         memcpy(iv, ptr, arg);
 2383         return 1;
 2384 
 2385     case EVP_CTRL_AEAD_SET_IVLEN:
 2386         arg = 15 - arg;
 2387         /* fall-through */
 2388 
 2389     case EVP_CTRL_CCM_SET_L:
 2390         if (arg < 2 || arg > 8)
 2391             return 0;
 2392 
 2393         cctx->aes.ccm.l = arg;
 2394         return 1;
 2395 
 2396     case EVP_CTRL_AEAD_SET_TAG:
 2397         if ((arg & 1) || arg < 4 || arg > 16)
 2398             return 0;
 2399 
 2400         enc = EVP_CIPHER_CTX_encrypting(c);
 2401         if (enc && ptr)
 2402             return 0;
 2403 
 2404         if (ptr) {
 2405             cctx->aes.ccm.tag_set = 1;
 2406             buf = EVP_CIPHER_CTX_buf_noconst(c);
 2407             memcpy(buf, ptr, arg);
 2408         }
 2409 
 2410         cctx->aes.ccm.m = arg;
 2411         return 1;
 2412 
 2413     case EVP_CTRL_AEAD_GET_TAG:
 2414         enc = EVP_CIPHER_CTX_encrypting(c);
 2415         if (!enc || !cctx->aes.ccm.tag_set)
 2416             return 0;
 2417 
 2418         if(arg < cctx->aes.ccm.m)
 2419             return 0;
 2420 
 2421         memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
 2422         cctx->aes.ccm.tag_set = 0;
 2423         cctx->aes.ccm.iv_set = 0;
 2424         cctx->aes.ccm.len_set = 0;
 2425         return 1;
 2426 
 2427     case EVP_CTRL_COPY:
 2428         return 1;
 2429 
 2430     default:
 2431         return -1;
 2432     }
 2433 }
 2434 
# define s390x_aes_ccm_cleanup aes_ccm_cleanup

# ifndef OPENSSL_NO_OCB
/*
 * No s390x-accelerated OCB: reuse the software OCB context and entry points
 * and report the hardware as not capable, so the generic implementation is
 * always chosen by the capability check in EVP_aes_*_ocb().
 */
#  define S390X_AES_OCB_CTX     EVP_AES_OCB_CTX
#  define S390X_aes_128_ocb_CAPABLE 0
#  define S390X_aes_192_ocb_CAPABLE 0
#  define S390X_aes_256_ocb_CAPABLE 0

#  define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
#  define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# endif
 2454 
/*
 * Define a generic-mode cipher pair for s390x builds: a hardware-backed
 * EVP_CIPHER, a software fallback, and the EVP_aes_<keylen>_<mode>()
 * accessor that selects between them at run time via the per-mode
 * S390X_aes_*_CAPABLE test.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,    \
                              MODE,flags)               \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {         \
    nid##_##keylen##_##nmode,blocksize,                 \
    keylen / 8,                             \
    ivlen,                              \
    flags | EVP_CIPH_##MODE##_MODE,                 \
    s390x_aes_##mode##_init_key,                    \
    s390x_aes_##mode##_cipher,                      \
    NULL,                               \
    sizeof(S390X_AES_##MODE##_CTX),                 \
    NULL,                               \
    NULL,                               \
    NULL,                               \
    NULL                                \
};                                  \
static const EVP_CIPHER aes_##keylen##_##mode = {           \
    nid##_##keylen##_##nmode,                       \
    blocksize,                              \
    keylen / 8,                             \
    ivlen,                              \
    flags | EVP_CIPH_##MODE##_MODE,                 \
    aes_init_key,                           \
    aes_##mode##_cipher,                        \
    NULL,                               \
    sizeof(EVP_AES_KEY),                        \
    NULL,                               \
    NULL,                               \
    NULL,                               \
    NULL                                \
};                                  \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)           \
{                                   \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?          \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;   \
}
 2491 
/*
 * Define a custom-mode (e.g. AEAD/XTS) cipher pair for s390x builds.
 * Unlike the generic macro these modes carry their own init/cipher/
 * cleanup/ctrl callbacks and a mode-specific context; XTS doubles the
 * stored key length because it uses two AES keys.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {         \
    nid##_##keylen##_##mode,                        \
    blocksize,                              \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    ivlen,                              \
    flags | EVP_CIPH_##MODE##_MODE,                 \
    s390x_aes_##mode##_init_key,                    \
    s390x_aes_##mode##_cipher,                      \
    s390x_aes_##mode##_cleanup,                     \
    sizeof(S390X_AES_##MODE##_CTX),                 \
    NULL,                               \
    NULL,                               \
    s390x_aes_##mode##_ctrl,                        \
    NULL                                \
};                                  \
static const EVP_CIPHER aes_##keylen##_##mode = {           \
    nid##_##keylen##_##mode,blocksize,                  \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    ivlen,                              \
    flags | EVP_CIPH_##MODE##_MODE,                 \
    aes_##mode##_init_key,                      \
    aes_##mode##_cipher,                        \
    aes_##mode##_cleanup,                       \
    sizeof(EVP_AES_##MODE##_CTX),                   \
    NULL,                               \
    NULL,                               \
    aes_##mode##_ctrl,                          \
    NULL                                \
};                                  \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)           \
{                                   \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?          \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;   \
}
 2527 
 2528 #else
 2529 
/*
 * Non-s390x build: define only the software EVP_CIPHER table plus the
 * EVP_aes_<keylen>_<mode>() accessor for the generic block modes.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
 2541 
/*
 * Non-s390x build: custom (AEAD/XTS-style) modes carry their own
 * init/cipher/cleanup/ctrl callbacks and context struct; XTS doubles the
 * key length because it uses two AES keys.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
 2554 
 2555 #endif
 2556 
/*
 * ARM/AArch64 run-time capability detection: map the generic BSAES/VPAES/
 * HWAES hooks onto the NEON and ARMv8 Crypto Extension implementations,
 * gated by the OPENSSL_armcap_P flags populated by the cpuid probe.
 */
#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
#  if defined(BSAES_ASM)
#   define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  if defined(VPAES_ASM)
#   define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#  define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#  define HWAES_encrypt aes_v8_encrypt
#  define HWAES_decrypt aes_v8_decrypt
#  define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif
 2575 
#if defined(HWAES_CAPABLE)
/*
 * Prototypes for the platform hardware-accelerated AES primitives.  The
 * HWAES_* names are macros mapped to the actual assembler symbols above
 * (e.g. aes_v8_* on ARMv8); the interfaces mirror the software AES_*
 * routines.
 */
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif
 2598 
/*
 * Instantiate the full set of generic block modes (CBC, ECB, OFB, CFB128,
 * CFB1, CFB8, CTR) for a single key length.
 */
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
 2607 
/*
 * Initialise an EVP_AES_KEY for the basic (non-AEAD) modes.  Selects the
 * fastest available implementation in priority order: hardware AES
 * (HWAES), bit-sliced (BSAES), vector-permutation (VPAES), then the
 * portable AES_* routines.  The decrypt key schedule is only used for
 * ECB/CBC decryption; every other mode/direction uses the encrypt
 * schedule.  'iv' is unused here: IV handling is done by the EVP layer.
 *
 * Returns 1 on success, 0 if low-level key setup failed.
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    /* ECB/CBC decryption needs the inverse (decrypt) key schedule. */
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            /* Portable fallback. */
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        /* Portable fallback. */
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    /* All *_set_*_key() variants return negative on failure. */
    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
 2712 
 2713 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2714                           const unsigned char *in, size_t len)
 2715 {
 2716     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2717 
 2718     if (dat->stream.cbc)
 2719         (*dat->stream.cbc) (in, out, len, &dat->ks,
 2720                             EVP_CIPHER_CTX_iv_noconst(ctx),
 2721                             EVP_CIPHER_CTX_encrypting(ctx));
 2722     else if (EVP_CIPHER_CTX_encrypting(ctx))
 2723         CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
 2724                               EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
 2725     else
 2726         CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
 2727                               EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
 2728 
 2729     return 1;
 2730 }
 2731 
 2732 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2733                           const unsigned char *in, size_t len)
 2734 {
 2735     size_t bl = EVP_CIPHER_CTX_block_size(ctx);
 2736     size_t i;
 2737     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2738 
 2739     if (len < bl)
 2740         return 1;
 2741 
 2742     for (i = 0, len -= bl; i <= len; i += bl)
 2743         (*dat->block) (in + i, out + i, &dat->ks);
 2744 
 2745     return 1;
 2746 }
 2747 
 2748 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2749                           const unsigned char *in, size_t len)
 2750 {
 2751     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2752 
 2753     int num = EVP_CIPHER_CTX_num(ctx);
 2754     CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
 2755                           EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
 2756     EVP_CIPHER_CTX_set_num(ctx, num);
 2757     return 1;
 2758 }
 2759 
 2760 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2761                           const unsigned char *in, size_t len)
 2762 {
 2763     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2764 
 2765     int num = EVP_CIPHER_CTX_num(ctx);
 2766     CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
 2767                           EVP_CIPHER_CTX_iv_noconst(ctx), &num,
 2768                           EVP_CIPHER_CTX_encrypting(ctx), dat->block);
 2769     EVP_CIPHER_CTX_set_num(ctx, num);
 2770     return 1;
 2771 }
 2772 
 2773 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2774                            const unsigned char *in, size_t len)
 2775 {
 2776     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2777 
 2778     int num = EVP_CIPHER_CTX_num(ctx);
 2779     CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
 2780                             EVP_CIPHER_CTX_iv_noconst(ctx), &num,
 2781                             EVP_CIPHER_CTX_encrypting(ctx), dat->block);
 2782     EVP_CIPHER_CTX_set_num(ctx, num);
 2783     return 1;
 2784 }
 2785 
 2786 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2787                            const unsigned char *in, size_t len)
 2788 {
 2789     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2790 
 2791     if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
 2792         int num = EVP_CIPHER_CTX_num(ctx);
 2793         CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
 2794                                 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
 2795                                 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
 2796         EVP_CIPHER_CTX_set_num(ctx, num);
 2797         return 1;
 2798     }
 2799 
 2800     while (len >= MAXBITCHUNK) {
 2801         int num = EVP_CIPHER_CTX_num(ctx);
 2802         CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
 2803                                 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
 2804                                 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
 2805         EVP_CIPHER_CTX_set_num(ctx, num);
 2806         len -= MAXBITCHUNK;
 2807         out += MAXBITCHUNK;
 2808         in  += MAXBITCHUNK;
 2809     }
 2810     if (len) {
 2811         int num = EVP_CIPHER_CTX_num(ctx);
 2812         CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
 2813                                 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
 2814                                 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
 2815         EVP_CIPHER_CTX_set_num(ctx, num);
 2816     }
 2817 
 2818     return 1;
 2819 }
 2820 
 2821 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 2822                           const unsigned char *in, size_t len)
 2823 {
 2824     unsigned int num = EVP_CIPHER_CTX_num(ctx);
 2825     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
 2826 
 2827     if (dat->stream.ctr)
 2828         CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
 2829                                     EVP_CIPHER_CTX_iv_noconst(ctx),
 2830                                     EVP_CIPHER_CTX_buf_noconst(ctx),
 2831                                     &num, dat->stream.ctr);
 2832     else
 2833         CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
 2834                               EVP_CIPHER_CTX_iv_noconst(ctx),
 2835                               EVP_CIPHER_CTX_buf_noconst(ctx), &num,
 2836                               dat->block);
 2837     EVP_CIPHER_CTX_set_num(ctx, num);
 2838     return 1;
 2839 }
 2840 
/* Instantiate all generic block modes for the three AES key sizes. */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
 2844 
 2845 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
 2846 {
 2847     EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
 2848     if (gctx == NULL)
 2849         return 0;
 2850     OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
 2851     if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
 2852         OPENSSL_free(gctx->iv);
 2853     return 1;
 2854 }
 2855 
/*
 * Control operations for the software AES-GCM implementation.  Returns 1
 * (or a positive byte count for EVP_CTRL_AEAD_TLS1_AAD) on success, 0 on
 * failure and -1 for unrecognised control types.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all per-context GCM state; the IV initially aliases the
         * context's built-in buffer with the cipher's default length. */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Expected tag may only be supplied when decrypting. */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag is only available after an encryption has produced one. */
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* Encrypt side: randomise the invocation field. */
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        /* Hand the last 'arg' bytes of the IV back to the caller. */
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt side: install the invocation field received on the wire. */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            /* Rewrite the record length in the stored AAD so it covers
             * only the payload (strip explicit IV and, on decrypt, tag). */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            /* Fix up internal pointers in a duplicated context so they
             * reference the destination's own key schedule and IV. */
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
 2996 
/*
 * Key/IV initialisation for software AES-GCM.  Either argument may be
 * NULL to set only the other.  Selects the fastest available AES encrypt
 * implementation (HWAES, BSAES, VPAES, then generic) for the key
 * schedule, plus an optional accelerated CTR callback used by GCM.
 * Returns 1 (this function cannot fail once called with valid state).
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Portable fallback implementation. */
            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
 3069 
 3070 /*
 3071  * Handle TLS GCM packet format. This consists of the last portion of the IV
 3072  * followed by the payload and finally the tag. On encrypt generate IV,
 3073  * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 3074  * and verify tag.
 3075  */
 3076 
/*
 * One-shot TLS record processing for AES-GCM: explicit IV || payload ||
 * tag, processed in place.  Returns the number of bytes written on
 * success (encrypt: payload + IV + tag; decrypt: payload), -1 on any
 * failure including tag mismatch.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            /* Bulk of the data goes through the stitched AES-GCM
             * assembler when available; 'bulk' bytes are then done. */
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* IV and AAD are single-use per record: force both to be set again. */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
 3194 
/*
 * aes_gcm_cipher: non-TLS GCM update/final entry point.
 *
 * EVP "custom cipher" calling convention:
 *   - in != NULL, out == NULL : 'in' is additional authenticated data (AAD)
 *   - in != NULL, out != NULL : encrypt or decrypt 'len' payload bytes
 *   - in == NULL              : final call; on encrypt compute and cache the
 *                               tag, on decrypt verify the tag previously
 *                               supplied via EVP_CTRL_AEAD_SET_TAG
 * Returns the number of bytes processed, 0 on a successful final call,
 * or -1 on error.
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    /* A saved TLS AAD means this is a one-shot TLS record operation */
    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            /* No output buffer: treat the input as AAD */
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    /*
                     * First bring the GCM state to a block boundary, then
                     * hand the aligned bulk to the stitched AES-GCM
                     * assembly routine.
                     */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    /* The asm path bypasses the normal length bookkeeping */
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                /* Process whatever the assembly path did not consume */
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    /* Same alignment + bulk-asm strategy, block-fn flavour */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    /* Align to a block boundary before the bulk asm path */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        /* Final call (in == NULL) */
        if (!ctx->encrypt) {
            /* Decrypt: a tag must have been set, and it must verify */
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        /* Encrypt: cache the full 16-byte tag for EVP_CTRL_AEAD_GET_TAG */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}
 3319 
/*
 * Flags shared by the AEAD "custom cipher" modes defined in this file
 * (used below for GCM and CCM): custom IV handling, ctrl-driven
 * init/copy and a variable IV length.
 */
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)

/* AES-GCM for 128/192/256-bit keys: stream-style (block size 1), 12-byte
 * default IV */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
 3331 
 3332 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
 3333 {
 3334     EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
 3335 
 3336     if (type == EVP_CTRL_COPY) {
 3337         EVP_CIPHER_CTX *out = ptr;
 3338         EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
 3339 
 3340         if (xctx->xts.key1) {
 3341             if (xctx->xts.key1 != &xctx->ks1)
 3342                 return 0;
 3343             xctx_out->xts.key1 = &xctx_out->ks1;
 3344         }
 3345         if (xctx->xts.key2) {
 3346             if (xctx->xts.key2 != &xctx->ks2)
 3347                 return 0;
 3348             xctx_out->xts.key2 = &xctx_out->ks2;
 3349         }
 3350         return 1;
 3351     } else if (type != EVP_CTRL_INIT)
 3352         return -1;
 3353     /* key1 and key2 are used as an indicator both key and IV are set */
 3354     xctx->xts.key1 = NULL;
 3355     xctx->xts.key2 = NULL;
 3356     return 1;
 3357 }
 3358 
 3359 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 3360                             const unsigned char *iv, int enc)
 3361 {
 3362     EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
 3363 
 3364     if (!iv && !key)
 3365         return 1;
 3366 
 3367     if (key)
 3368         do {
 3369             /* The key is two half length keys in reality */
 3370             const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
 3371 
 3372             /*
 3373              * Verify that the two keys are different.
 3374              *
 3375              * This addresses the vulnerability described in Rogaway's
 3376              * September 2004 paper:
 3377              *
 3378              *      "Efficient Instantiations of Tweakable Blockciphers and
 3379              *       Refinements to Modes OCB and PMAC".
 3380              *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
 3381              *
 3382              * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
 3383              * that:
 3384              *      "The check for Key_1 != Key_2 shall be done at any place
 3385              *       BEFORE using the keys in the XTS-AES algorithm to process
 3386              *       data with them."
 3387              */
 3388             if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
 3389                 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
 3390                 return 0;
 3391             }
 3392 
 3393 #ifdef AES_XTS_ASM
 3394             xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
 3395 #else
 3396             xctx->stream = NULL;
 3397 #endif
 3398             /* key_len is two AES keys */
 3399 #ifdef HWAES_CAPABLE
 3400             if (HWAES_CAPABLE) {
 3401                 if (enc) {
 3402                     HWAES_set_encrypt_key(key,
 3403                                           EVP_CIPHER_CTX_key_length(ctx) * 4,
 3404                                           &xctx->ks1.ks);
 3405                     xctx->xts.block1 = (block128_f) HWAES_encrypt;
 3406 # ifdef HWAES_xts_encrypt
 3407                     xctx->stream = HWAES_xts_encrypt;
 3408 # endif
 3409                 } else {
 3410                     HWAES_set_decrypt_key(key,
 3411                                           EVP_CIPHER_CTX_key_length(ctx) * 4,
 3412                                           &xctx->ks1.ks);
 3413                     xctx->xts.block1 = (block128_f) HWAES_decrypt;
 3414 # ifdef HWAES_xts_decrypt
 3415                     xctx->stream = HWAES_xts_decrypt;
 3416 #endif
 3417                 }
 3418 
 3419                 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
 3420                                       EVP_CIPHER_CTX_key_length(ctx) * 4,
 3421                                       &xctx->ks2.ks);
 3422                 xctx->xts.block2 = (block128_f) HWAES_encrypt;
 3423 
 3424                 xctx->xts.key1 = &xctx->ks1;
 3425                 break;
 3426             } else
 3427 #endif
 3428 #ifdef BSAES_CAPABLE
 3429             if (BSAES_CAPABLE)
 3430                 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
 3431             else
 3432 #endif
 3433 #ifdef VPAES_CAPABLE
 3434             if (VPAES_CAPABLE) {
 3435                 if (enc) {
 3436                     vpaes_set_encrypt_key(key,
 3437                                           EVP_CIPHER_CTX_key_length(ctx) * 4,
 3438                                           &xctx->ks1.ks);
 3439                     xctx->xts.block1 = (block128_f) vpaes_encrypt;
 3440                 } else {
 3441                     vpaes_set_decrypt_key(key,
 3442                                           EVP_CIPHER_CTX_key_length(ctx) * 4,
 3443                                           &xctx->ks1.ks);
 3444                     xctx->xts.block1 = (block128_f) vpaes_decrypt;
 3445                 }
 3446 
 3447                 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
 3448                                       EVP_CIPHER_CTX_key_length(ctx) * 4,
 3449                                       &xctx->ks2.ks);
 3450                 xctx->xts.block2 = (block128_f) vpaes_encrypt;
 3451 
 3452                 xctx->xts.key1 = &xctx->ks1;
 3453                 break;
 3454             } else
 3455 #endif
 3456                 (void)0;        /* terminate potentially open 'else' */
 3457 
 3458             if (enc) {
 3459                 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
 3460                                     &xctx->ks1.ks);
 3461                 xctx->xts.block1 = (block128_f) AES_encrypt;
 3462             } else {
 3463                 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
 3464                                     &xctx->ks1.ks);
 3465                 xctx->xts.block1 = (block128_f) AES_decrypt;
 3466             }
 3467 
 3468             AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
 3469                                 EVP_CIPHER_CTX_key_length(ctx) * 4,
 3470                                 &xctx->ks2.ks);
 3471             xctx->xts.block2 = (block128_f) AES_encrypt;
 3472 
 3473             xctx->xts.key1 = &xctx->ks1;
 3474         } while (0);
 3475 
 3476     if (iv) {
 3477         xctx->xts.key2 = &xctx->ks2;
 3478         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
 3479     }
 3480 
 3481     return 1;
 3482 }
 3483 
 3484 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
 3485                           const unsigned char *in, size_t len)
 3486 {
 3487     EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
 3488     if (!xctx->xts.key1 || !xctx->xts.key2)
 3489         return 0;
 3490     if (!out || !in || len < AES_BLOCK_SIZE)
 3491         return 0;
 3492     if (xctx->stream)
 3493         (*xctx->stream) (in, out, len,
 3494                          xctx->xts.key1, xctx->xts.key2,
 3495                          EVP_CIPHER_CTX_iv_noconst(ctx));
 3496     else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
 3497                                    in, out, len,
 3498                                    EVP_CIPHER_CTX_encrypting(ctx)))
 3499         return 0;
 3500     return 1;
 3501 }
 3502 
/* XTS keeps no heap state in its context, so no cleanup hook is needed */
#define aes_xts_cleanup NULL

#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

/* XTS variants: 128/256-bit (the EVP key carries two half-length keys) */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
 3511 
/*
 * aes_ccm_ctrl: ctrl dispatcher for AES-CCM contexts.
 *
 * Returns 1 (or a positive byte count for EVP_CTRL_AEAD_TLS1_AAD) on
 * success, 0 on failure and -1 for unsupported ctrl types.
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: L = 8 length octets (7-byte nonce), 12-byte tag */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;
    case EVP_CTRL_GET_IVLEN:
        /* CCM nonce length is always 15 - L */
        *(int *)ptr = 15 - cctx->L;
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* The last two AAD bytes carry the record length, big endian */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            /* Write the adjusted payload length back into the saved AAD */
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Convert nonce length to L and share the range check below */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        /* L in [2,8], i.e. nonce length in [7,13] */
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length M must be an even value in [4,16] */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* An actual tag value may only be supplied when decrypting */
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Only valid after an encryption has produced a tag */
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Invalidate per-message state so the context is not reused as-is */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            /* Re-point the key schedule if (and only if) it is internal */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
 3610 
/*
 * aes_ccm_init_key: install key and/or nonce for AES-CCM.
 *
 * CCM only needs an encrypt-direction schedule; the fastest available
 * implementation is chosen (hardware AES, then vector-permutation AES,
 * then the portable C code).  Either argument may be NULL to set only
 * the other.  Always returns 1.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key)
        /* do/while(0) + break implements "pick first capable backend" */
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            /* Portable C fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* Nonce is 15 - L bytes long */
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
 3655 
/*
 * aes_ccm_tls_cipher: one-shot encrypt/decrypt of a whole TLS record
 * using the AAD previously saved via EVP_CTRL_AEAD_TLS1_AAD.
 *
 * On encrypt, returns the total record length (explicit IV + payload +
 * tag); on decrypt, returns the payload length after a successful tag
 * check.  Returns -1 on any error; on a failed tag check the plaintext
 * buffer is wiped first.
 */
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value: strip explicit IV and tag to get the payload */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    /* CCM needs the payload length up front, bound into the nonce block */
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
            return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Prefer the platform ccm64 stream routine when available */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        /* Append the tag after the ciphertext */
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Constant-time compare of computed vs. transmitted tag */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Decrypt or tag check failed: do not expose the plaintext */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
 3703 
/*
 * aes_ccm_cipher: non-TLS CCM entry point.
 *
 * CCM must know the total payload length before processing, so the EVP
 * calling convention is:
 *   - in == NULL, out == NULL : 'len' declares the payload length
 *                               (binds the nonce via ccm128_setiv)
 *   - in != NULL, out == NULL : feed 'in' as AAD
 *   - in != NULL, out != NULL : encrypt/decrypt the payload
 *   - in == NULL, out != NULL : final call, returns 0 (no output)
 * Returns the number of bytes processed or -1 on error.
 */
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    /* Saved TLS AAD means a one-shot TLS record operation */
    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            /* Both NULL: 'len' is the declared total payload length */
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Prefer the platform ccm64 stream routine when available */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        /* Tag is now computable via EVP_CTRL_AEAD_GET_TAG */
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Constant-time compare against the tag saved in ctx->buf */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        /* On failure wipe the plaintext rather than exposing it */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* Decryption is one-shot: force a fresh IV/tag/length next time */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
 3776 
/* CCM keeps no heap state in its context, so no cleanup hook is needed */
#define aes_ccm_cleanup NULL

/* AES-CCM for 128/192/256-bit keys, registered with a 12-byte default IV */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
 3785 
/* Per-context state for the AES key-wrap ciphers */
typedef struct {
    union {
        double align;           /* force alignment of the key schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    /*
     * Points into the EVP_CIPHER_CTX IV buffer once an IV has been set
     * (see aes_wrap_init_key); NULL doubles as "no IV set", in which case
     * the wrap primitives use their default IV.
     */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;
 3794 
 3795 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 3796                              const unsigned char *iv, int enc)
 3797 {
 3798     EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
 3799     if (!iv && !key)
 3800         return 1;
 3801     if (key) {
 3802         if (EVP_CIPHER_CTX_encrypting(ctx))
 3803             AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
 3804                                 &wctx->ks.ks);
 3805         else
 3806             AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
 3807                                 &wctx->ks.ks);
 3808         if (!iv)
 3809             wctx->iv = NULL;
 3810     }
 3811     if (iv) {
 3812         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
 3813         wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
 3814     }
 3815     return 1;
 3816 }
 3817 
/*
 * aes_wrap_cipher: AES key wrap / unwrap (with or without padding).
 *
 * The padded variant is selected by the cipher's registered IV length
 * (4 bytes = padded, 8 bytes = unpadded).  With out == NULL this is a
 * length query: it returns the (maximum) output size for 'inlen' input
 * bytes.  Returns the output length on success, 0 on overlap error and
 * -1 on any other failure.
 */
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        /* Length query only */
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    /* The wrap primitives return 0 on failure */
    return rv ? (int)rv : -1;
}
 3876 
 3877 #define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
 3878                 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
 3879                 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
 3880 
 3881 static const EVP_CIPHER aes_128_wrap = {
 3882     NID_id_aes128_wrap,
 3883     8, 16, 8, WRAP_FLAGS,
 3884     aes_wrap_init_key, aes_wrap_cipher,
 3885     NULL,
 3886     sizeof(EVP_AES_WRAP_CTX),
 3887     NULL, NULL, NULL, NULL
 3888 };
 3889 
 3890 const EVP_CIPHER *EVP_aes_128_wrap(void)
 3891 {
 3892     return &aes_128_wrap;
 3893 }
 3894 
 3895 static const EVP_CIPHER aes_192_wrap = {
 3896     NID_id_aes192_wrap,
 3897     8, 24, 8, WRAP_FLAGS,
 3898     aes_wrap_init_key, aes_wrap_cipher,
 3899     NULL,
 3900     sizeof(EVP_AES_WRAP_CTX),
 3901     NULL, NULL, NULL, NULL
 3902 };
 3903 
 3904 const EVP_CIPHER *EVP_aes_192_wrap(void)
 3905 {
 3906     return &aes_192_wrap;
 3907 }
 3908 
 3909 static const EVP_CIPHER aes_256_wrap = {
 3910     NID_id_aes256_wrap,
 3911     8, 32, 8, WRAP_FLAGS,
 3912     aes_wrap_init_key, aes_wrap_cipher,
 3913     NULL,
 3914     sizeof(EVP_AES_WRAP_CTX),
 3915     NULL, NULL, NULL, NULL
 3916 };
 3917 
 3918 const EVP_CIPHER *EVP_aes_256_wrap(void)
 3919 {
 3920     return &aes_256_wrap;
 3921 }
 3922 
 3923 static const EVP_CIPHER aes_128_wrap_pad = {
 3924     NID_id_aes128_wrap_pad,
 3925     8, 16, 4, WRAP_FLAGS,
 3926     aes_wrap_init_key, aes_wrap_cipher,
 3927     NULL,
 3928     sizeof(EVP_AES_WRAP_CTX),
 3929     NULL, NULL, NULL, NULL
 3930 };
 3931 
 3932 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
 3933 {
 3934     return &aes_128_wrap_pad;
 3935 }
 3936 
/*
 * AES-192 key wrap with padding (RFC 5649).  Same layout as
 * aes_128_wrap_pad; only the key length (24) differs.
 */
static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Return the static AES-192 padded key-wrap cipher definition. */
const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}
 3950 
/*
 * AES-256 key wrap with padding (RFC 5649).  Same layout as
 * aes_128_wrap_pad; only the key length (32) differs.
 */
static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Return the static AES-256 padded key-wrap cipher definition. */
const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
 3964 
 3965 #ifndef OPENSSL_NO_OCB
 3966 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
 3967 {
 3968     EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
 3969     EVP_CIPHER_CTX *newc;
 3970     EVP_AES_OCB_CTX *new_octx;
 3971 
 3972     switch (type) {
 3973     case EVP_CTRL_INIT:
 3974         octx->key_set = 0;
 3975         octx->iv_set = 0;
 3976         octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
 3977         octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
 3978         octx->taglen = 16;
 3979         octx->data_buf_len = 0;
 3980         octx->aad_buf_len = 0;
 3981         return 1;
 3982 
 3983     case EVP_CTRL_GET_IVLEN:
 3984         *(int *)ptr = octx->ivlen;
 3985         return 1;
 3986 
 3987     case EVP_CTRL_AEAD_SET_IVLEN:
 3988         /* IV len must be 1 to 15 */
 3989         if (arg <= 0 || arg > 15)
 3990             return 0;
 3991 
 3992         octx->ivlen = arg;
 3993         return 1;
 3994 
 3995     case EVP_CTRL_AEAD_SET_TAG:
 3996         if (!ptr) {
 3997             /* Tag len must be 0 to 16 */
 3998             if (arg < 0 || arg > 16)
 3999                 return 0;
 4000 
 4001             octx->taglen = arg;
 4002             return 1;
 4003         }
 4004         if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
 4005             return 0;
 4006         memcpy(octx->tag, ptr, arg);
 4007         return 1;
 4008 
 4009     case EVP_CTRL_AEAD_GET_TAG:
 4010         if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
 4011             return 0;
 4012 
 4013         memcpy(ptr, octx->tag, arg);
 4014         return 1;
 4015 
 4016     case EVP_CTRL_COPY:
 4017         newc = (EVP_CIPHER_CTX *)ptr;
 4018         new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
 4019         return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
 4020                                       &new_octx->ksenc.ks,
 4021                                       &new_octx->ksdec.ks);
 4022 
 4023     default:
 4024         return -1;
 4025 
 4026     }
 4027 }
 4028 
# ifdef HWAES_CAPABLE
/*
 * Prototypes for platform-provided (assembly) OCB bulk routines.  When a
 * platform does not supply one, the name is defined to a NULL ocb128_f so
 * that CRYPTO_ocb128_init falls back to its generic block-at-a-time path.
 */
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#    define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#    define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif
 4051 
/*
 * Initialise an AES-OCB EVP context with a key and/or IV.  Either may be
 * NULL: a NULL key with a non-NULL iv stores or applies the IV only; both
 * NULL is a no-op.  |enc| selects encrypt (non-zero) or decrypt.
 * Returns 1 on success, 0 on error.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        /*
         * do/while(0) so each implementation branch can "break" out once
         * it has set up the key schedules and OCB context.
         */
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            /* Preferred: hardware AES, with assembly OCB bulk routines */
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            /* Next: vector-permutation AES, no bulk OCB routine */
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            /* Fallback: portable C AES implementation */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
 4131 
/*
 * Main AES-OCB cipher routine.  Three modes of operation, selected by the
 * arguments: AAD processing (in != NULL, out == NULL), data en/decryption
 * (both non-NULL), and finalisation (in == NULL), which flushes buffered
 * partial blocks and generates (encrypt) or verifies (decrypt) the tag.
 * Returns the number of bytes written to |out| (0 if all input was
 * buffered), or -1 on error / tag mismatch.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            /* in and out may not partially overlap (in == out is allowed) */
            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            /* Still not a full block: stash the input and report 0 written */
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
 4280 
 4281 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
 4282 {
 4283     EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
 4284     CRYPTO_ocb128_cleanup(&octx->ocb);
 4285     return 1;
 4286 }
 4287 
/*
 * Generate the EVP_CIPHER tables and EVP_aes_{128,192,256}_ocb()
 * accessors via the BLOCK_CIPHER_custom macro (defined earlier in this
 * file).  The numeric arguments are presumably key bits, block size (16)
 * and default IV length (12) -- confirm against the macro definition.
 */
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                         /* OPENSSL_NO_OCB */