/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

/* alg_name should name the underlying algorithm to be wrapped by cryptd */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
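
/*
 * Usage sketch (illustrative only, not part of this API): allocate a
 * cryptd-wrapped skcipher and release it again.  "cbc(aes)" is just an
 * example algorithm name, and type/mask of 0/0 assume no special
 * instantiation constraints.
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	&ctfm->base is then a regular crypto_skcipher handle whose
 *	requests are executed asynchronously from cryptd's workqueue;
 *	cryptd_skcipher_child() exposes the underlying transform.
 *
 *	cryptd_free_skcipher(ctfm);
 */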

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should name the underlying algorithm to be wrapped by cryptd */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
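
/*
 * Usage sketch (illustrative only): wrap a synchronous hash in cryptd.
 * "sha256" is an arbitrary example name; 0/0 for type/mask assume no
 * special instantiation constraints.
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	&ctfm->base is an ordinary crypto_ahash handle; the underlying
 *	synchronous transform is reachable via cryptd_ahash_child(), and
 *	cryptd_shash_desc() returns the shash_desc for a given request.
 *
 *	cryptd_free_ahash(ctfm);
 */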

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

/* alg_name should name the underlying algorithm to be wrapped by cryptd */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);
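
/*
 * Usage sketch (illustrative only): the AEAD interface mirrors the
 * skcipher and ahash ones above.  "gcm(aes)" is an arbitrary example
 * algorithm name.
 *
 *	struct cryptd_aead *ctfm;
 *
 *	ctfm = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_aead(ctfm);
 */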

#endif