/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005,2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static char keylen_flag;

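/*
 * Per-tfm context: the raw key and its length are kept so the matching
 * KM/KMC function code can be chosen per request; if the machine lacks
 * support for the key size, the allocated software fallback (cipher or
 * blkcipher, depending on the mode) is used instead.
 */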
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

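/*
 * Parameter block handed to the PCC (Perform Cryptographic Computation)
 * instruction; the field layout matches what the hardware expects for
 * computing the XTS tweak.
 */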
struct s390_xts_ctx {
	u8 key[32];
	u8 xts_param[16];
	struct pcc_param pcc;
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is supported, a positive number if a software fallback
 * is required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}

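/*
 * Propagate the request flags of this tfm to the fallback cipher before
 * its setkey, and copy any result flags (e.g. a bad-key indication)
 * back to this tfm if the fallback rejects the key.
 */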
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

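/*
 * Single-block encrypt/decrypt: key sizes the hardware cannot handle
 * are serviced by the fallback cipher, everything else goes through
 * the KM (CIPHER MESSAGE) instruction with the function code matching
 * the key length.
 */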
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

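/*
 * Single-block "aes" cipher: hardware-accelerated where possible, with
 * the software fallback allocated in fallback_init_cip() covering key
 * sizes the machine cannot handle.
 */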
static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

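/*
 * The blkcipher fallback helpers temporarily point desc->tfm at the
 * fallback tfm so the software implementation runs with the caller's
 * IV and flags, then restore the original tfm before returning.
 */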
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

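/*
 * Walk the scatterlists and hand the hardware runs of complete blocks:
 * nbytes is masked down to a multiple of AES_BLOCK_SIZE and the
 * remainder is returned to blkcipher_walk_done() until the walk is
 * finished.
 */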
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

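/*
 * KMC (CIPHER MESSAGE WITH CHAINING) keeps the chaining value in its
 * parameter block: the walk's IV is copied in once, updated in place by
 * the instruction across all runs of complete blocks, and copied back
 * at the end so chained requests continue with the correct IV. The iv
 * and key fields sit back to back in s390_aes_ctx, which appears to
 * form the contiguous IV-plus-key parameter block the instruction
 * expects.
 */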
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

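/*
 * An XTS key is two AES keys back to back: the first half is the data
 * (KM) key, the second half the tweak (PCC) key. 48-byte keys (XTS
 * AES-192) have no hardware function code and always take the software
 * fallback.
 */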
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		ret = xts_fallback_setkey(tfm, in_key, key_len);
		if (ret)
			return ret;
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc.key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}

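/*
 * offset selects the 128- vs. 256-bit layout inside the key/parameter
 * areas (128-bit keys are stored 16 bytes in). PCC derives the initial
 * tweak from the IV once per request; KM then processes the complete
 * blocks using that precomputed XTS parameter.
 */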
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	void *param;

	if (!nbytes)
		goto out;

	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
	param = xts_ctx->pcc.key + offset;
	ret = crypt_s390_pcc(func, param);
	BUG_ON(ret < 0);

	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
	param = xts_ctx->key + offset;
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON(ret < 0 || ret != n);

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(xts_aes_alg.cra_list),
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_aes_set_key,
			.encrypt		=	xts_aes_encrypt,
			.decrypt		=	xts_aes_decrypt,
		}
	}
};

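/*
 * Module init: probe which CPACF function codes the machine provides.
 * Each AES key size is enabled individually, and the XTS algorithm is
 * only registered when both XTS function codes are available (MSA4).
 */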
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
	}

out:
	return ret;

xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_s390_fini(void)
{
	crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");