// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */
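
/*
 * Rough usage sketch (illustrative only; error handling elided, src/dst
 * are scatterlists).  The key passed to setkey is the cipher key followed
 * by the 4-byte nonce; the per-request IV is the 8-byte RFC 3686 IV:
 *
 *	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_crypt(req, src, dst, nbytes, iv);
 *	err = crypto_skcipher_encrypt(req);
 */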

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

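/*
 * Process the final, possibly partial block: encrypt the counter block
 * once to get a block of keystream, XOR only the remaining walk->nbytes
 * bytes of it into the destination, then bump the counter.
 */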
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

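/*
 * Out-of-place path: generate each keystream block directly into the
 * destination buffer, then XOR the source into it, one full block at a
 * time.  Returns the number of leftover bytes (< bsize) for the caller.
 */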
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

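/*
 * In-place path: source and destination alias, so the keystream goes into
 * an aligned stack buffer and is XORed over the data in place, one full
 * block at a time.
 */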
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

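/*
 * encrypt()/decrypt() handler for the "ctr" template.  CTR turns the block
 * cipher into a stream cipher and is its own inverse, so one routine
 * serves both directions.
 */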
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

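/* Instantiate "ctr(<cipher>)" around a single-block cipher algorithm. */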
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

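/*
 * RFC 3686 keys are the underlying CTR key with the 4-byte nonce appended;
 * strip the nonce off the end and pass the rest to the child transform.
 */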
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

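/*
 * Build the RFC 3686 counter block and forward the request to the child
 * "ctr(...)" transform.  The 16-byte counter block is laid out as:
 *
 *	| nonce (4 bytes) | IV (8 bytes) | block counter (4 bytes, BE, = 1) |
 */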
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

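/*
 * Allocate the child CTR skcipher and size the request context to hold the
 * aligned counter block plus the child's own request.
 */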
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

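/* Instantiate "rfc3686(<skcipher>)" around an existing CTR-mode skcipher. */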
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc3686_free(inst);
	}
	return err;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");