/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

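/*
 * Example: a rough sketch of how a caller might drive the ahash interface
 * declared in <crypto/hash.h> and implemented below.  The algorithm name
 * "sha1" and every ahash_example_* identifier are hypothetical and exist
 * only for illustration; a real user would choose its own names, algorithm
 * and error handling.
 */
struct ahash_example_result {
	struct completion done;
	int err;
};

static void ahash_example_complete(struct crypto_async_request *req, int err)
{
	struct ahash_example_result *res = req->data;

	/* The request was merely accepted/queued; wait for the real end. */
	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->done);
}

static int __maybe_unused ahash_example_digest(const u8 *buf,
					       unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct ahash_example_result res;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_ahash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	init_completion(&res.done);
	sg_init_one(&sg, buf, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   ahash_example_complete, &res);
	ahash_request_set_crypt(req, &sg, out, len);

	err = crypto_ahash_digest(req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		/* Completed asynchronously; pick up the final status. */
		wait_for_completion(&res.done);
		err = res.err;
	}

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}

/*
 * Temporary per-request state used when the digest has to be written to an
 * aligned bounce buffer first (unaligned result pointer, or the default
 * finup path below).  ubuf provides the aligned scratch space; complete,
 * data and result save the caller's original completion callback and
 * result buffer so they can be restored when the operation finishes.
 */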
struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
			    halg);
}

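/*
 * Map the current scatterlist page and work out how many bytes the caller
 * may hash from it: no more than remains in this entry, no further than the
 * end of the page, and, if the starting offset is misaligned, only up to
 * the next alignmask boundary so the rest can be processed aligned.
 */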
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = crypto_kmap(walk->pg, 0);
	walk->data += offset;

	if (offset & alignmask) {
		unsigned int unaligned = alignmask + 1 - (offset & alignmask);
		if (nbytes > unaligned)
			nbytes = unaligned;
	}

	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->pg = sg_page(sg);
	walk->offset = sg->offset;
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

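/*
 * Finish the current walk step.  If a misaligned head was handed out in the
 * previous step and data remains, advance to the aligned portion of the
 * same page and return that to the caller.  Otherwise unmap the page and
 * either propagate an error, continue on the next page of the same entry,
 * move on to the next scatterlist entry, or return 0 when the walk is done.
 */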
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int nbytes = walk->entrylen;

	walk->data -= walk->offset;

	if (nbytes && walk->offset & alignmask && !err) {
		walk->offset = ALIGN(walk->offset, alignmask + 1);
		walk->data += walk->offset;

		nbytes = min(nbytes,
			     ((unsigned int)(PAGE_SIZE)) - walk->offset);
		walk->entrylen -= nbytes;

		return nbytes;
	}

	crypto_kunmap(walk->data, 0);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (nbytes) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = scatterwalk_sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

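/*
 * Start walking the source scatterlist of an ahash request.  Returns the
 * number of bytes available in the first step, or 0 if there is nothing
 * to hash.
 */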
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
				  struct crypto_hash_walk *walk,
				  struct scatterlist *sg, unsigned int len)
{
	walk->total = len;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
	walk->sg = sg;
	walk->flags = hdesc->flags;

	return hash_walk_new_entry(walk);
}

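/*
 * The key passed in by the caller does not satisfy the algorithm's
 * alignmask, so copy it into a suitably aligned temporary buffer before
 * handing it to the setkey operation, then wipe and free the copy.
 */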
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return ret;
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return ahash_setkey_unaligned(tfm, key, keylen);

	return tfm->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

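/*
 * Worst-case padding needed to realign a buffer that already has
 * crypto_tfm_ctx_alignment() (the guaranteed alignment of priv->ubuf) up to
 * an algorithm alignmask of @mask: only the part of the mask above the
 * guaranteed alignment can cost extra bytes.
 */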
static inline unsigned int ahash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
}

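/*
 * The operation ran with the digest redirected into the aligned bounce
 * buffer in the private state.  Once it has truly finished (not merely been
 * queued, which reports -EINPROGRESS), copy the digest back to the caller's
 * original result buffer and release the private state.
 */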
static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
{
	struct ahash_request_priv *priv = req->priv;

	if (err == -EINPROGRESS)
		return;

	if (!err)
		memcpy(priv->result, req->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	kzfree(priv);
}

static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	ahash_op_unaligned_finish(areq, err);

	complete(data, err);
}

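/*
 * The caller's result buffer violates the algorithm's alignmask.  Allocate
 * private state with an aligned bounce buffer, temporarily redirect the
 * request's result and completion callback at it, run the operation, and
 * let ahash_op_unaligned_finish()/ahash_op_unaligned_done() copy the digest
 * back whether the request completes synchronously or asynchronously.
 */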
static int ahash_op_unaligned(struct ahash_request *req,
			      int (*op)(struct ahash_request *))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request_priv *priv;
	int err;

	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC);
	if (!priv)
		return -ENOMEM;

	priv->result = req->result;
	priv->complete = req->base.complete;
	priv->data = req->base.data;

	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
	req->base.complete = ahash_op_unaligned_done;
	req->base.data = req;
	req->priv = priv;

	err = op(req);
	ahash_op_unaligned_finish(req, err);

	return err;
}

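/*
 * Common entry point for final/finup/digest: dispatch directly when the
 * result buffer is already aligned, otherwise go through the unaligned
 * bounce-buffer path above.
 */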
static int crypto_ahash_op(struct ahash_request *req,
			   int (*op)(struct ahash_request *))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)req->result & alignmask)
		return ahash_op_unaligned(req, op);

	return op(req);
}

int crypto_ahash_final(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

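/*
 * Default finup implementation for algorithms that only provide update()
 * and final(): chain the two operations together, using the same aligned
 * bounce-buffer scheme as above so the final digest can be copied back to
 * the caller's result buffer.  The *_finish1/*_done1 helpers run after
 * update(), the *_finish2/*_done2 helpers after final().
 */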
static void ahash_def_finup_finish2(struct ahash_request *req, int err)
{
	struct ahash_request_priv *priv = req->priv;

	if (err == -EINPROGRESS)
		return;

	if (!err)
		memcpy(priv->result, req->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	kzfree(priv);
}

static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	ahash_def_finup_finish2(areq, err);

	complete(data, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	err = crypto_ahash_reqtfm(req)->final(req);

out:
	ahash_def_finup_finish2(req, err);
	return err;
}

static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	err = ahash_def_finup_finish1(areq, err);

	complete(data, err);
}

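/* Entry point for the default update()+final() based finup above. */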
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request_priv *priv;

	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC);
	if (!priv)
		return -ENOMEM;

	priv->result = req->result;
	priv->complete = req->base.complete;
	priv->data = req->base.data;

	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
	req->base.complete = ahash_def_finup_done1;
	req->base.data = req;
	req->priv = priv;

	return ahash_def_finup_finish1(req, tfm->update(req));
}

static int ahash_no_export(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}

static int ahash_no_import(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}

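/*
 * Set up the per-transform operation pointers when an ahash tfm is created.
 * Algorithms that are not native ahash implementations are handled by
 * crypto_init_shash_ops_async(), which wraps a synchronous shash; native
 * ahash algorithms get their own ops, with defaults filled in for the
 * optional setkey/export/import hooks and for finup.
 */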
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;
	hash->export = ahash_no_export;
	hash->import = ahash_no_import;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;

	if (alg->setkey)
		hash->setkey = alg->setkey;
	if (alg->export)
		hash->export = alg->export;
	if (alg->import)
		hash->import = alg->import;

	return 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_ahash_type)
		return alg->cra_ctxsize;

	return sizeof(struct crypto_shash *);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};
EXPORT_SYMBOL_GPL(crypto_ahash_type);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

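/*
 * Common validation and setup shared by crypto_register_ahash() and
 * ahash_register_instance(): bound the digest and state sizes and mark the
 * algorithm as an ahash type.
 */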
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;

	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
	    alg->halg.statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

int crypto_unregister_ahash(struct ahash_alg *alg)
{
	return crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(ahash_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_free_instance);

int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
			    struct hash_alg_common *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_ahash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);

struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
}
EXPORT_SYMBOL_GPL(ahash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");