/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 * Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 * Authors: Adrian Hoban <adrian.hoban@intel.com>
 *          Gabriele Paoloni <gabriele.paoloni@intel.com>
 *          Tadeusz Struk (tadeusz.struk@intel.com)
 *          Aidan O'Mahony (aidan.o.mahony@intel.com)
 * Copyright (c) 2010, Intel Corporation.
 */

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

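/* All three key sizes share the glue code below.  aes_set_key() selects
 * an aes_ops dispatch table once, and every subsequent operation
 * indirects through it to the assembler routine built for that key size.
 */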
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

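/* key[] holds the expanded key schedule; AES_MAX_KEYLENGTH is sized for
 * the 15 round keys of AES-256, the largest case.
 */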
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

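/* Per-key-size primitives, implemented in sparc64 assembler using the
 * AES crypto opcodes (aes_asm.S in this directory).
 */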
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

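	/* Expanded key schedule size in bytes: 16 bytes per round key,
	 * giving 0xb0 (11 round keys) for AES-128, 0xd0 (13) for
	 * AES-192 and 0xf0 (15) for AES-256.
	 */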
	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

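/* Mask a byte count down to a whole number of AES blocks.  AES_BLOCK_SIZE
 * is a power of two (16), so ~(size - 1) clears the partial-block
 * remainder bits.
 */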
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

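/* Each mode routine below follows the same pattern: load the round keys
 * into the FPU registers once, process every full block the walker hands
 * us, then clear FPRS so the FPU state dirtied by the crypto opcodes is
 * discarded rather than saved as live state.
 */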
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

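/* Decryption applies the round keys in reverse, so the decrypt routines
 * are handed a pointer just past the end of the expanded key schedule
 * and index backwards from it.
 */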
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

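/* CTR mode never runs the block cipher backwards: it encrypts a counter
 * and XORs the resulting keystream into the data, so a single routine
 * using the encryption key schedule serves both directions.
 */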
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ctr_crypt(&ctx->key[0],
					    (const u64 *)walk.src.virt.addr,
					    (u64 *) walk.dst.virt.addr,
					    block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

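/* Priority 150 places these above the generic C implementations
 * (priority 100), so the crypto API prefers them once registered.  The
 * alignmask matches the assembler's accesses: 32-bit words for the
 * single-block cipher, 64-bit words for the block-mode routines.
 */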
static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
} };

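/* HWCAP_SPARC_CRYPTO says the CPU has the crypto unit at all; the
 * CFR_AES bit in the Configuration Feature Register (%asr26) says the
 * AES opcodes in particular are implemented.
 */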
static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_aes_opcode()) {
		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 aes opcodes not available.\n");
	return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS("aes");