// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
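
/*
 * Illustrative note (added commentary, not in the original file): looking
 * up "aes" matches any registered algorithm whose cra_name is "aes" (a
 * fuzzy match, with the highest cra_priority winning, e.g. an accelerated
 * "aes-aesni" beating "aes-generic"), while looking up "aes-generic"
 * matches only the algorithm with that exact cra_driver_name. The driver
 * names here follow the common convention but are examples, not something
 * this file guarantees.
 */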

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}
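
/*
 * Added commentary (not in the original file): the larval starts with a
 * refcount of two because, once published, it has two owners - one
 * reference is effectively held by crypto_alg_list and dropped in
 * crypto_larval_kill(), the other belongs to the caller and is dropped in
 * crypto_larval_wait().
 */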

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}
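
/*
 * Added commentary (not in the original file): the wait above resolves in
 * one of five ways - a fatal signal (-EINTR), no registration within 60
 * seconds (-ETIMEDOUT), the larval dying without an adult (-ENOENT), an
 * adult that has not yet passed its self-tests or whose module reference
 * cannot be taken (-EAGAIN, prompting callers such as crypto_alloc_base()
 * to retry), or success, in which case a module plus algorithm reference
 * to the adult is returned.
 */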

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}
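
/*
 * Added commentary (not in the original file): unless the caller asked
 * about CRYPTO_ALG_TESTED explicitly, the first pass only accepts
 * algorithms that have passed their self-tests. The second pass then
 * distinguishes "still being tested" (a larval, worth waiting for) from
 * "registered but failed its self-tests" (-ELIBBAD).
 */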

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}
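
/*
 * Illustrative sketch (not part of the original file): the "crypto-%s"
 * module request above relies on algorithm drivers declaring a matching
 * alias, e.g. a SHA-256 driver typically carries:
 *
 *	MODULE_ALIAS_CRYPTO("sha256");
 *
 * so that request_module("crypto-sha256") can load it on demand.
 */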

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
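
/*
 * Illustrative sketch (not part of the original file): a caller that wants
 * "cbc(aes)" but must never see an internal-only implementation can pass
 * zeroes and let the mask default above kick in:
 *
 *	struct crypto_alg *alg;
 *
 *	alg = crypto_alg_mod_lookup("cbc(aes)", 0, 0);
 *	if (IS_ERR(alg))
 *		return PTR_ERR(alg);
 *	...
 *	crypto_mod_put(alg);
 */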

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
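
/*
 * Illustrative sketch (not part of the original file): a minimal
 * legacy-style allocation and release, assuming the "sha1" algorithm name
 * as an example:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */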

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node in which users desire to put requests, if node is
 *	  NUMA_NO_NODE, it means users have no special requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
			    const struct crypto_type *frontend, u32 type,
			    u32 mask, int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
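
/*
 * Illustrative sketch (not part of the original file): type-safe wrappers
 * such as crypto_alloc_skcipher() are thin layers over this function, each
 * supplying its own struct crypto_type frontend, roughly along the lines
 * of:
 *
 *	struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
 *						      u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_skcipher_type,
 *					type, mask);
 *	}
 *
 * where crypto_alloc_tfm() is the NUMA_NO_NODE convenience form; the
 * actual wrapper bodies live in the per-type files (e.g. crypto/skcipher.c).
 */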

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
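
/*
 * Illustrative sketch (not part of the original file): probing for an
 * algorithm before committing to a code path, e.g.
 *
 *	if (!crypto_has_alg("sha256", 0, 0))
 *		return -ENOPKG;
 *
 * Note that the lookup may load modules and take semaphores, so this is
 * not suitable for atomic context.
 */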

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
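
/*
 * Illustrative sketch (not part of the original file): crypto_req_done()
 * is the standard completion callback for waiting synchronously on an
 * async request via the crypto_wait helpers:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *					   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_wait_req() turns -EINPROGRESS/-EBUSY into a sleep on the
 * completion and then returns wait.err.
 */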

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");