/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

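/*
 * struct skcipher_instance - skcipher algorithm produced by a template
 *
 * The 'head' padding makes the embedded struct crypto_instance start at
 * the same offset as alg.base, so an instance can be viewed either as a
 * struct skcipher_alg or as a generic struct crypto_instance.
 */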
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

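/*
 * struct crypto_skcipher_spawn - an instance's reference to an inner
 * skcipher algorithm, held by templates that build on another cipher.
 */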
struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

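/*
 * struct skcipher_walk - state for iterating over a request's data
 *
 * The walk hands out the source and destination scatterlists in chunks
 * (multiples of 'stride' where possible), bounce-buffering through
 * 'page'/'buffer' when an entry is misaligned or crosses a page.
 */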
struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline void crypto_set_skcipher_spawn(
	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
			 u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

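/*
 * Registration of standalone skcipher algorithms and template instances.
 * The *_skciphers() variants operate on an array and unwind any earlier
 * registrations on failure.
 */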
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

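/*
 * Scatterlist walk interface. A minimal sketch of how a synchronous
 * implementation typically drives it ("crypt_blocks" stands in for a
 * driver-specific helper, not part of this API):
 *
 *	struct skcipher_walk walk;
 *	int err = skcipher_walk_virt(&walk, req, false);
 *
 *	while (walk.nbytes) {
 *		unsigned int n = walk.nbytes;
 *
 *		if (n < walk.total)
 *			n = round_down(n, walk.stride);
 *		crypt_blocks(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 *			     n, walk.iv);
 *		err = skcipher_walk_done(&walk, walk.nbytes - n);
 *	}
 *	return err;
 */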
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

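/* Legacy ablkcipher interface, kept while remaining users are converted. */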
static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
					       int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
{
	return req->base.flags;
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

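/*
 * These accessors see through skcipher_algs that wrap the legacy
 * blkcipher/ablkcipher algorithm types and report the limits of the
 * underlying algorithm.
 */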
static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.min_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.min_keysize;

	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.max_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.max_keysize;

	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}
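
/*
 * A minimal sketch of a template create() hook built on the simple
 * helpers ("mymode_*" names are hypothetical):
 *
 *	static int mymode_create(struct crypto_template *tmpl,
 *				 struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		struct crypto_alg *alg;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *		inst->alg.encrypt = mymode_encrypt;
 *		inst->alg.decrypt = mymode_decrypt;
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		crypto_mod_put(alg);
 *		return err;
 *	}
 */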
struct skcipher_instance *
skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
			       struct crypto_alg **cipher_alg_ret);

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */