/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif

#define AESNI_ALIGN 16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)

/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
        u8 hash_subkey[16] AESNI_ALIGN_ATTR;
        struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
        u8 nonce[4];
};

struct generic_gcmaes_ctx {
        u8 hash_subkey[16] AESNI_ALIGN_ATTR;
        struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
        u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
        u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16

struct gcm_context_data {
        /* init, update and finalize context data */
        u8 aad_hash[GCM_BLOCK_LEN];
        u64 aad_length;
        u64 in_length;
        u8 partial_block_enc_key[GCM_BLOCK_LEN];
        u8 orig_IV[GCM_BLOCK_LEN];
        u8 current_counter[GCM_BLOCK_LEN];
        u64 partial_block_len;
        u64 unused;
        u8 hash_keys[GCM_BLOCK_LEN * 16];
};

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
                                 const u8 *in, bool enc, u8 *iv);

/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
                        struct gcm_context_data *gdata, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
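
/*
 * Illustrative call sequence (a sketch; the variable names are placeholders,
 * not part of this interface): callers must own the FPU for the duration of
 * the call and supply linear, suitably aligned buffers, e.g.
 *
 *	kernel_fpu_begin();
 *	aesni_gcm_enc(aes_ctx, &data, dst, src, len, iv,
 *		      hash_subkey, aad, aad_len, auth_tag, auth_tag_len);
 *	kernel_fpu_end();
 */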

/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
                        struct gcm_context_data *gdata, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
                               struct gcm_context_data *gdata,
                               u8 *iv,
                               u8 *hash_subkey, const u8 *aad,
                               unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
                                     struct gcm_context_data *gdata, u8 *out,
                                     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
                                     struct gcm_context_data *gdata, u8 *out,
                                     const u8 *in,
                                     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
                                   struct gcm_context_data *gdata,
                                   u8 *auth_tag, unsigned long auth_tag_len);
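
/*
 * The scatter/gather entry points are designed to be called as a sequence,
 * which is what gcmaes_crypt_by_sg() below does (sketch; placeholder names):
 *
 *	kernel_fpu_begin();
 *	aesni_gcm_init(aes_ctx, &data, iv, hash_subkey, assoc, assoclen);
 *	while (bytes remain)
 *		aesni_gcm_enc_update(aes_ctx, &data, dst, src, chunk_len);
 *	aesni_gcm_finalize(aes_ctx, &data, auth_tag, auth_tag_len);
 *	kernel_fpu_end();
 */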

static struct aesni_gcm_tfm_s {
        void (*init)(void *ctx,
                     struct gcm_context_data *gdata,
                     u8 *iv,
                     u8 *hash_subkey, const u8 *aad,
                     unsigned long aad_len);
        void (*enc_update)(void *ctx,
                           struct gcm_context_data *gdata, u8 *out,
                           const u8 *in,
                           unsigned long plaintext_len);
        void (*dec_update)(void *ctx,
                           struct gcm_context_data *gdata, u8 *out,
                           const u8 *in,
                           unsigned long ciphertext_len);
        void (*finalize)(void *ctx,
                         struct gcm_context_data *gdata,
                         u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;

struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
        .init = &aesni_gcm_init,
        .enc_update = &aesni_gcm_enc_update,
        .dec_update = &aesni_gcm_dec_update,
        .finalize = &aesni_gcm_finalize,
};
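
/*
 * aesni_gcm_tfm points at one of these dispatch tables; aesni_init() below
 * selects the widest variant the CPU supports (SSE, AVX or AVX2).
 */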

#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
                void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
                void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
                void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
                                        struct gcm_context_data *gdata,
                                        u8 *iv,
                                        u8 *hash_subkey,
                                        const u8 *aad,
                                        unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
                                struct gcm_context_data *gdata, u8 *out,
                                const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
                                struct gcm_context_data *gdata, u8 *out,
                                const u8 *in,
                                unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
                                struct gcm_context_data *gdata,
                                u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
                        struct gcm_context_data *gdata, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
                        struct gcm_context_data *gdata, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
        .init = &aesni_gcm_init_avx_gen2,
        .enc_update = &aesni_gcm_enc_update_avx_gen2,
        .dec_update = &aesni_gcm_dec_update_avx_gen2,
        .finalize = &aesni_gcm_finalize_avx_gen2,
};

#endif

#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
                                        struct gcm_context_data *gdata,
                                        u8 *iv,
                                        u8 *hash_subkey,
                                        const u8 *aad,
                                        unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
                                struct gcm_context_data *gdata, u8 *out,
                                const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
                                struct gcm_context_data *gdata, u8 *out,
                                const u8 *in,
                                unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
                                struct gcm_context_data *gdata,
                                u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
                        struct gcm_context_data *gdata, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
                        struct gcm_context_data *gdata, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
        .init = &aesni_gcm_init_avx_gen4,
        .enc_update = &aesni_gcm_enc_update_avx_gen4,
        .dec_update = &aesni_gcm_dec_update_avx_gen4,
        .finalize = &aesni_gcm_finalize_avx_gen4,
};

#endif

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (!irq_fpu_usable())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int len)
{
        return aes_set_key_common(crypto_skcipher_tfm(tfm),
                                  crypto_skcipher_ctx(tfm), key, len);
}

static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

#ifdef CONFIG_X86_64
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                            struct skcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        aesni_enc(ctx, keystream, ctrblk);
        crypto_xor_cpy(dst, keystream, src, nbytes);

        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv)
{
        /*
         * based on key length, override with the by8 version
         * of ctr mode encryption/decryption for improved performance
         * aes_set_key_common() ensures that key length is one of:
         * {128, 192, 256}
         */
        if (ctx->key_length == AES_KEYSIZE_128)
                aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
        else if (ctx->key_length == AES_KEYSIZE_192)
                aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
        else
                aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif

static int ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                                  nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = skcipher_walk_done(&walk, 0);
        }
        kernel_fpu_end();

        return err;
}

static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        keylen /= 2;

        /* first half of xts-key is for crypt */
        err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
                                 key, keylen);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
                                  key + keylen, keylen);
}

static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
        aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = 1,

        .funcs = { {
                .num_blocks = 8,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
        } }
};

static const struct common_glue_ctx aesni_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = 1,

        .funcs = { {
                .num_blocks = 8,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
        } }
};

static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&aesni_enc_xts, req,
                                   XTS_TWEAK_CAST(aesni_xts_tweak),
                                   aes_ctx(ctx->raw_tweak_ctx),
                                   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&aesni_dec_xts, req,
                                   XTS_TWEAK_CAST(aesni_xts_tweak),
                                   aes_ctx(ctx->raw_tweak_ctx),
                                   aes_ctx(ctx->raw_crypt_ctx));
}

static int rfc4106_init(struct crypto_aead *aead)
{
        struct cryptd_aead *cryptd_tfm;
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
                                       CRYPTO_ALG_INTERNAL,
                                       CRYPTO_ALG_INTERNAL);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);

        *ctx = cryptd_tfm;
        crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
        return 0;
}

static void rfc4106_exit(struct crypto_aead *aead)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_free_aead(*ctx);
}

static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
        struct crypto_cipher *tfm;
        int ret;

        tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_cipher_setkey(tfm, key, key_len);
        if (ret)
                goto out_free_cipher;

        /* Clear the data in the hash sub key container to zero. */
        /* We want to cipher all zeros to create the hash sub key. */
        memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

        crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
        crypto_free_cipher(tfm);
        return ret;
}
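
/*
 * The value computed above is the GHASH hash subkey H = E_K(0^128) defined
 * by the GCM specification (NIST SP 800-38D); the assembly init routines
 * then expand it into the per-request hash key material held in
 * struct gcm_context_data (the hash_keys table).
 */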

static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
                                  unsigned int key_len)
{
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

        if (key_len < 4) {
                crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        /* Account for the 4-byte nonce at the end. */
        key_len -= 4;

        memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

        return aes_set_key_common(crypto_aead_tfm(aead),
                                  &ctx->aes_key_expanded, key, key_len) ?:
               rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
                                  unsigned int key_len)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(parent);
        struct cryptd_aead *cryptd_tfm = *ctx;

        return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}

static int common_rfc4106_set_authsize(struct crypto_aead *aead,
                                       unsigned int authsize)
{
        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) and can
 * be 8, 12 or 16 bytes long. */
static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
                                       unsigned int authsize)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(parent);
        struct cryptd_aead *cryptd_tfm = *ctx;

        return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
                                       unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12:
        case 13:
        case 14:
        case 15:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
                              unsigned int assoclen, u8 *hash_subkey,
                              u8 *iv, void *aes_ctx)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        unsigned long auth_tag_len = crypto_aead_authsize(tfm);
        struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
        struct gcm_context_data data AESNI_ALIGN_ATTR;
        struct scatter_walk dst_sg_walk = {};
        unsigned long left = req->cryptlen;
        unsigned long len, srclen, dstlen;
        struct scatter_walk assoc_sg_walk;
        struct scatter_walk src_sg_walk;
        struct scatterlist src_start[2];
        struct scatterlist dst_start[2];
        struct scatterlist *src_sg;
        struct scatterlist *dst_sg;
        u8 *src, *dst, *assoc;
        u8 *assocmem = NULL;
        u8 authTag[16];

        if (!enc)
                left -= auth_tag_len;

#ifdef CONFIG_AS_AVX2
        if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
                gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
        if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
                gcm_tfm = &aesni_gcm_tfm_sse;
#endif
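
        /*
         * A plausible reading of the OPTSIZE thresholds used above: the
         * wider AVX/AVX2 code paths carry a higher fixed setup cost, so
         * short requests are routed to the simpler variants.
         */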

        /* Linearize assoc, if not already linear */
        if (req->src->length >= assoclen && req->src->length &&
            (!PageHighMem(sg_page(req->src)) ||
             req->src->offset + req->src->length <= PAGE_SIZE)) {
                scatterwalk_start(&assoc_sg_walk, req->src);
                assoc = scatterwalk_map(&assoc_sg_walk);
        } else {
                /* assoc can be any length, so must be on heap */
                assocmem = kmalloc(assoclen, GFP_ATOMIC);
                if (unlikely(!assocmem))
                        return -ENOMEM;
                assoc = assocmem;

                scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
        }

        src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
        scatterwalk_start(&src_sg_walk, src_sg);
        if (req->src != req->dst) {
                dst_sg = scatterwalk_ffwd(dst_start, req->dst, req->assoclen);
                scatterwalk_start(&dst_sg_walk, dst_sg);
        }

        kernel_fpu_begin();
        gcm_tfm->init(aes_ctx, &data, iv,
                      hash_subkey, assoc, assoclen);
        if (req->src != req->dst) {
                while (left) {
                        src = scatterwalk_map(&src_sg_walk);
                        dst = scatterwalk_map(&dst_sg_walk);
                        srclen = scatterwalk_clamp(&src_sg_walk, left);
                        dstlen = scatterwalk_clamp(&dst_sg_walk, left);
                        len = min(srclen, dstlen);
                        if (len) {
                                if (enc)
                                        gcm_tfm->enc_update(aes_ctx, &data,
                                                            dst, src, len);
                                else
                                        gcm_tfm->dec_update(aes_ctx, &data,
                                                            dst, src, len);
                        }
                        left -= len;

                        scatterwalk_unmap(src);
                        scatterwalk_unmap(dst);
                        scatterwalk_advance(&src_sg_walk, len);
                        scatterwalk_advance(&dst_sg_walk, len);
                        scatterwalk_done(&src_sg_walk, 0, left);
                        scatterwalk_done(&dst_sg_walk, 1, left);
                }
        } else {
                while (left) {
                        dst = src = scatterwalk_map(&src_sg_walk);
                        len = scatterwalk_clamp(&src_sg_walk, left);
                        if (len) {
                                if (enc)
                                        gcm_tfm->enc_update(aes_ctx, &data,
                                                            src, src, len);
                                else
                                        gcm_tfm->dec_update(aes_ctx, &data,
                                                            src, src, len);
                        }
                        left -= len;
                        scatterwalk_unmap(src);
                        scatterwalk_advance(&src_sg_walk, len);
                        scatterwalk_done(&src_sg_walk, 1, left);
                }
        }
        gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
        kernel_fpu_end();

        if (!assocmem)
                scatterwalk_unmap(assoc);
        else
                kfree(assocmem);

        if (!enc) {
                u8 authTagMsg[16];

                /* Copy out original authTag */
                scatterwalk_map_and_copy(authTagMsg, req->src,
                                         req->assoclen + req->cryptlen -
                                         auth_tag_len,
                                         auth_tag_len, 0);

                /* Compare generated tag with passed in tag. */
                return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
                        -EBADMSG : 0;
        }

        /* Copy in the authTag */
        scatterwalk_map_and_copy(authTag, req->dst,
                                 req->assoclen + req->cryptlen,
                                 auth_tag_len, 1);

        return 0;
}

static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
                          u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
        return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
                                  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
                          u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
        return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
                                  aes_ctx);
}

static int helper_rfc4106_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
        unsigned int i;
        __be32 counter = cpu_to_be32(1);

        /* Assuming we are supporting rfc4106 64-bit extended sequence
         * numbers, the AAD length must be 16 or 20 bytes.
         */
        if (unlikely(req->assoclen != 16 && req->assoclen != 20))
                return -EINVAL;

        /* The IV is built below. */
        for (i = 0; i < 4; i++)
                *(iv+i) = ctx->nonce[i];
        for (i = 0; i < 8; i++)
                *(iv+4+i) = req->iv[i];
        *((__be32 *)(iv+12)) = counter;
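
        /*
         * The 16 bytes just assembled form GCM's pre-counter block j0 for a
         * 12-byte nonce: the 4-byte salt carried at the end of the key, the
         * 8-byte explicit IV from the request, and a 32-bit big-endian
         * counter initialized to 1 (RFC 4106, NIST SP 800-38D).
         */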

        return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
                              aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
        __be32 counter = cpu_to_be32(1);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
        unsigned int i;

        /* Assuming we are supporting rfc4106 64-bit extended sequence
         * numbers, the AAD length must be 16 or 20 bytes.
         */
        if (unlikely(req->assoclen != 16 && req->assoclen != 20))
                return -EINVAL;

        /* The IV is built below, as in helper_rfc4106_encrypt(). */
        for (i = 0; i < 4; i++)
                *(iv+i) = ctx->nonce[i];
        for (i = 0; i < 8; i++)
                *(iv+4+i) = req->iv[i];
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
                              aes_ctx);
}

static int gcmaes_wrapper_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
        struct cryptd_aead *cryptd_tfm = *ctx;

        tfm = &cryptd_tfm->base;
        if (irq_fpu_usable() && (!in_atomic() ||
                                 !cryptd_aead_queued(cryptd_tfm)))
                tfm = cryptd_aead_child(cryptd_tfm);

        aead_request_set_tfm(req, tfm);

        return crypto_aead_encrypt(req);
}

static int gcmaes_wrapper_decrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
        struct cryptd_aead *cryptd_tfm = *ctx;

        tfm = &cryptd_tfm->base;
        if (irq_fpu_usable() && (!in_atomic() ||
                                 !cryptd_aead_queued(cryptd_tfm)))
                tfm = cryptd_aead_child(cryptd_tfm);

        aead_request_set_tfm(req, tfm);

        return crypto_aead_decrypt(req);
}
#endif

static struct crypto_alg aesni_algs[] = { {
        .cra_name = "aes",
        .cra_driver_name = "aes-aesni",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = AES_MIN_KEY_SIZE,
                        .cia_max_keysize = AES_MAX_KEY_SIZE,
                        .cia_setkey = aes_set_key,
                        .cia_encrypt = aes_encrypt,
                        .cia_decrypt = aes_decrypt
                }
        }
}, {
        .cra_name = "__aes",
        .cra_driver_name = "__aes-aesni",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = AES_MIN_KEY_SIZE,
                        .cia_max_keysize = AES_MAX_KEY_SIZE,
                        .cia_setkey = aes_set_key,
                        .cia_encrypt = __aes_encrypt,
                        .cia_decrypt = __aes_decrypt
                }
        }
} };

static struct skcipher_alg aesni_skciphers[] = {
        {
                .base = {
                        .cra_name = "__ecb(aes)",
                        .cra_driver_name = "__ecb-aes-aesni",
                        .cra_priority = 400,
                        .cra_flags = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .setkey = aesni_skcipher_setkey,
                .encrypt = ecb_encrypt,
                .decrypt = ecb_decrypt,
        }, {
                .base = {
                        .cra_name = "__cbc(aes)",
                        .cra_driver_name = "__cbc-aes-aesni",
                        .cra_priority = 400,
                        .cra_flags = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .setkey = aesni_skcipher_setkey,
                .encrypt = cbc_encrypt,
                .decrypt = cbc_decrypt,
#ifdef CONFIG_X86_64
        }, {
                .base = {
                        .cra_name = "__ctr(aes)",
                        .cra_driver_name = "__ctr-aes-aesni",
                        .cra_priority = 400,
                        .cra_flags = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize = 1,
                        .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .chunksize = AES_BLOCK_SIZE,
                .setkey = aesni_skcipher_setkey,
                .encrypt = ctr_crypt,
                .decrypt = ctr_crypt,
        }, {
                .base = {
                        .cra_name = "__xts(aes)",
                        .cra_driver_name = "__xts-aes-aesni",
                        .cra_priority = 401,
                        .cra_flags = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = XTS_AES_CTX_SIZE,
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = 2 * AES_MIN_KEY_SIZE,
                .max_keysize = 2 * AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .setkey = xts_aesni_setkey,
                .encrypt = xts_encrypt,
                .decrypt = xts_decrypt,
#endif
        }
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
                                  unsigned int key_len)
{
        struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

        return aes_set_key_common(crypto_aead_tfm(aead),
                                  &ctx->aes_key_expanded, key, key_len) ?:
               rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int generic_gcmaes_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
        __be32 counter = cpu_to_be32(1);

        memcpy(iv, req->iv, 12);
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
                              aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
        __be32 counter = cpu_to_be32(1);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

        memcpy(iv, req->iv, 12);
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
                              aes_ctx);
}

static int generic_gcmaes_init(struct crypto_aead *aead)
{
        struct cryptd_aead *cryptd_tfm;
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
                                       CRYPTO_ALG_INTERNAL,
                                       CRYPTO_ALG_INTERNAL);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);

        *ctx = cryptd_tfm;
        crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));

        return 0;
}

static void generic_gcmaes_exit(struct crypto_aead *aead)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_free_aead(*ctx);
}

static struct aead_alg aesni_aead_algs[] = { {
        .setkey = common_rfc4106_set_key,
        .setauthsize = common_rfc4106_set_authsize,
        .encrypt = helper_rfc4106_encrypt,
        .decrypt = helper_rfc4106_decrypt,
        .ivsize = GCM_RFC4106_IV_SIZE,
        .maxauthsize = 16,
        .base = {
                .cra_name = "__gcm-aes-aesni",
                .cra_driver_name = "__driver-gcm-aes-aesni",
                .cra_flags = CRYPTO_ALG_INTERNAL,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx),
                .cra_alignmask = AESNI_ALIGN - 1,
                .cra_module = THIS_MODULE,
        },
}, {
        .init = rfc4106_init,
        .exit = rfc4106_exit,
        .setkey = gcmaes_wrapper_set_key,
        .setauthsize = gcmaes_wrapper_set_authsize,
        .encrypt = gcmaes_wrapper_encrypt,
        .decrypt = gcmaes_wrapper_decrypt,
        .ivsize = GCM_RFC4106_IV_SIZE,
        .maxauthsize = 16,
        .base = {
                .cra_name = "rfc4106(gcm(aes))",
                .cra_driver_name = "rfc4106-gcm-aesni",
                .cra_priority = 400,
                .cra_flags = CRYPTO_ALG_ASYNC,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct cryptd_aead *),
                .cra_module = THIS_MODULE,
        },
}, {
        .setkey = generic_gcmaes_set_key,
        .setauthsize = generic_gcmaes_set_authsize,
        .encrypt = generic_gcmaes_encrypt,
        .decrypt = generic_gcmaes_decrypt,
        .ivsize = GCM_AES_IV_SIZE,
        .maxauthsize = 16,
        .base = {
                .cra_name = "__generic-gcm-aes-aesni",
                .cra_driver_name = "__driver-generic-gcm-aes-aesni",
                .cra_priority = 0,
                .cra_flags = CRYPTO_ALG_INTERNAL,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct generic_gcmaes_ctx),
                .cra_alignmask = AESNI_ALIGN - 1,
                .cra_module = THIS_MODULE,
        },
}, {
        .init = generic_gcmaes_init,
        .exit = generic_gcmaes_exit,
        .setkey = gcmaes_wrapper_set_key,
        .setauthsize = gcmaes_wrapper_set_authsize,
        .encrypt = gcmaes_wrapper_encrypt,
        .decrypt = gcmaes_wrapper_decrypt,
        .ivsize = GCM_AES_IV_SIZE,
        .maxauthsize = 16,
        .base = {
                .cra_name = "gcm(aes)",
                .cra_driver_name = "generic-gcm-aesni",
                .cra_priority = 400,
                .cra_flags = CRYPTO_ALG_ASYNC,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct cryptd_aead *),
                .cra_module = THIS_MODULE,
        },
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif

static const struct x86_cpu_id aesni_cpu_id[] = {
        X86_FEATURE_MATCH(X86_FEATURE_AES),
        {}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);

static void aesni_free_simds(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
                    aesni_simd_skciphers[i]; i++)
                simd_skcipher_free(aesni_simd_skciphers[i]);
}

static int __init aesni_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        if (!x86_match_cpu(aesni_cpu_id))
                return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
        if (boot_cpu_has(X86_FEATURE_AVX2)) {
                pr_info("AVX2 version of gcm_enc/dec engaged.\n");
                aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
        } else
#endif
#ifdef CONFIG_AS_AVX
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                pr_info("AVX version of gcm_enc/dec engaged.\n");
                aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
        } else
#endif
        {
                pr_info("SSE version of gcm_enc/dec engaged.\n");
                aesni_gcm_tfm = &aesni_gcm_tfm_sse;
        }
        aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                /* optimize performance of ctr mode encryption transform */
                aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
                pr_info("AES CTR mode by8 optimization enabled\n");
        }
#endif
#endif

        err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
        if (err)
                return err;

        err = crypto_register_skciphers(aesni_skciphers,
                                        ARRAY_SIZE(aesni_skciphers));
        if (err)
                goto unregister_algs;

        err = crypto_register_aeads(aesni_aead_algs,
                                    ARRAY_SIZE(aesni_aead_algs));
        if (err)
                goto unregister_skciphers;

        for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
                algname = aesni_skciphers[i].base.cra_name + 2;
                drvname = aesni_skciphers[i].base.cra_driver_name + 2;
                basename = aesni_skciphers[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aesni_simd_skciphers[i] = simd;
        }

        return 0;

unregister_simds:
        aesni_free_simds();
        crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
        crypto_unregister_skciphers(aesni_skciphers,
                                    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
        crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
        return err;
}

static void __exit aesni_exit(void)
{
        aesni_free_simds();
        crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
        crypto_unregister_skciphers(aesni_skciphers,
                                    ARRAY_SIZE(aesni_skciphers));
        crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");