// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt. This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */
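/*
 * Illustrative note (not part of the original source): with the 8-byte
 * sequence number supplied by the caller in req->iv and the per-instance
 * salt ctx->salt, the generated IV is simply
 *
 *	iv[i] = req->iv[i] ^ ctx->salt[i]	for i = 0..7
 *
 * which is exactly what the crypto_xor() call in seqiv_aead_encrypt()
 * computes in place before the IV is written to the destination buffer.
 */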

#include <crypto/internal/geniv.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/string.h>

static void seqiv_free(struct crypto_instance *inst);

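/*
 * Post-processing for an encryption that had to use a temporary, aligned
 * copy of the IV: copy the generated IV back into the caller's req->iv
 * (unless the request failed) and free the temporary buffer, zeroing it
 * first.
 */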
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kzfree(subreq->iv);
}

static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
					int err)
{
	struct aead_request *req = base->data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

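/*
 * Encryption path: copy the data to the destination if necessary, turn the
 * caller-supplied sequence number into an IV by XORing it with the salt,
 * store that IV just after the associated data in the destination and then
 * encrypt the remaining data in place with the child AEAD.
 */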
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

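	/*
	 * The child AEAD operates in place on req->dst below, so if the
	 * caller supplied separate buffers, first copy the associated data
	 * and plaintext across using the null cipher.
	 */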
	if (req->src != req->dst) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);

		skcipher_request_set_sync_tfm(nreq, ctx->sknull);
		skcipher_request_set_callback(nreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(nreq, req->src, req->dst,
					   req->assoclen + req->cryptlen,
					   NULL);

		err = crypto_skcipher_encrypt(nreq);
		if (err)
			return err;
	}

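	/*
	 * If the caller's IV buffer does not satisfy the child's alignment
	 * requirements, work on an aligned copy instead; the completion
	 * handler copies the result back and frees the copy.
	 */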
	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmemdup(req->iv, ivsize, req->base.flags &
			       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
			       GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

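	/* IV = sequence number XOR salt, stored just after the AD in dst. */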
	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != req->iv))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}

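/*
 * Decryption path: the IV sits between the associated data and the
 * ciphertext, so pull it out into req->iv and hand the remainder to the
 * child AEAD, with the IV counted as part of its associated data.
 */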
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

	return crypto_aead_decrypt(subreq);
}

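/*
 * Instantiate "seqiv(<aead>)".  Only algorithms with a 64-bit IV are
 * supported, since the IV is derived from a 64-bit sequence number.
 */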
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.init = aead_init_geniv;
	inst->alg.exit = aead_exit_geniv;

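	/* Reserve room for the salt at the end of the geniv context. */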
	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
	goto out;
}

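/* Only AEAD algorithms may be wrapped by this template. */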
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		return -EINVAL;

	return seqiv_aead_create(tmpl, tb);
}

static void seqiv_free(struct crypto_instance *inst)
{
	aead_geniv_free(aead_instance(inst));
}

static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
	return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqiv_tmpl);
}

subsys_initcall(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");