/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

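/* Per-tfm context.  The key schedule is stored expanded, as 64-bit
 * words, sized for the largest case (15 16-byte round keys for
 * AES-256).
 */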
struct crypto_sparc64_aes_ctx {
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

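/* The aes_sparc64_* routines declared extern below are implemented in
 * the accompanying assembler file (aes_asm.S) using the sparc64 AES
 * opcodes.
 */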
extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

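	/* The expanded schedule occupies (rounds + 1) 16-byte round keys:
	 * 0xb0 (176) bytes for AES-128, 0xd0 (208) for AES-192 and
	 * 0xf0 (240) for AES-256.
	 */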
	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		break;

	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

extern void aes_sparc64_encrypt(const u64 *key, const u32 *input,
				u32 *output, unsigned int key_len);

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	aes_sparc64_encrypt(&ctx->key[0], (const u32 *) src,
			    (u32 *) dst, ctx->key_length);
}

extern void aes_sparc64_decrypt(const u64 *key, const u32 *input,
				u32 *output, unsigned int key_len,
				unsigned int expanded_key_len);

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	aes_sparc64_decrypt(&ctx->key[0], (const u32 *) src,
			    (u32 *) dst, ctx->key_length,
			    ctx->expanded_key_length);
}

extern void aes_sparc64_load_encrypt_keys(u64 *key);
extern void aes_sparc64_load_decrypt_keys(u64 *key);

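/* Round a byte count down to a whole number of AES blocks. */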
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

extern void aes_sparc64_ecb_encrypt(u64 *key, const u32 *input, u32 *output,
				    unsigned int key_len, unsigned int len);

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	aes_sparc64_load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			aes_sparc64_ecb_encrypt(&ctx->key[0],
						(const u32 *)walk.src.virt.addr,
						(u32 *) walk.dst.virt.addr,
						ctx->key_length, block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
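	/* The AES opcodes work on the floating point register file, so
	 * clear %fprs to mark the FPU unused again; the registers we
	 * dirtied hold no user state.
	 */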
	fprs_write(0);
	return err;
}

extern void aes_sparc64_ecb_decrypt(u64 *ekey, const u32 *input, u32 *output,
				    unsigned int key_len, unsigned int len);

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	aes_sparc64_load_decrypt_keys(&ctx->key[0]);
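	/* Decryption consumes the schedule in reverse; the asm entry
	 * points take a pointer just past the last round key.
	 */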
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			aes_sparc64_ecb_decrypt(key_end,
						(const u32 *) walk.src.virt.addr,
						(u32 *) walk.dst.virt.addr,
						ctx->key_length, block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

extern void aes_sparc64_cbc_encrypt(u64 *key, const u32 *input, u32 *output,
				    unsigned int key_len, unsigned int len,
				    u64 *iv);

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	aes_sparc64_load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			aes_sparc64_cbc_encrypt(&ctx->key[0],
						(const u32 *)walk.src.virt.addr,
						(u32 *) walk.dst.virt.addr,
						ctx->key_length, block_len,
						(u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

extern void aes_sparc64_cbc_decrypt(u64 *ekey, unsigned int key_len,
				    const u32 *input, u32 *output,
				    unsigned int len, u64 *iv);

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	aes_sparc64_load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			aes_sparc64_cbc_decrypt(key_end, ctx->key_length,
						(const u32 *) walk.src.virt.addr,
						(u32 *) walk.dst.virt.addr,
						block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

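/* Register the bare cipher plus ECB and CBC modes.  Priority 150
 * outranks the generic C implementations (priority 100), so these are
 * preferred whenever the opcodes are available.
 */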
static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
} };

static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

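	/* %asr26 is the Configuration Feature Register, which reports
	 * which of the crypto opcodes this cpu actually implements.
	 */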
	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_aes_opcode()) {
		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 aes opcodes not available.\n");
	return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS("aes");