/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		63
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15

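/*
 * Illustrative sketch (not part of the API): these bounds are intended for
 * sizing on-stack scratch buffers that must fit one block of any conforming
 * algorithm at any supported alignment, e.g.:
 *
 *	u8 buf[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
 *	u8 *tmp = PTR_ALIGN(buf, MAX_CIPHER_ALIGNMASK + 1);
 */
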
struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;
struct sk_buff;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

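/*
 * Minimal sketch of template registration (illustrative; names are
 * hypothetical): modules typically register an array of templates on load
 * and unregister it on unload.
 *
 *	static struct crypto_template my_tmpls[] = { ... };
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_templates(my_tmpls, ARRAY_SIZE(my_tmpls));
 *	}
 *
 *	static void __exit my_mod_exit(void)
 *	{
 *		crypto_unregister_templates(my_tmpls, ARRAY_SIZE(my_tmpls));
 *	}
 */
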
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s = (unsigned long *)src;

		while (size > 0) {
			*d++ ^= *s++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, dst, src, size);
	}
}

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
				  unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s1 = (unsigned long *)src1;
		unsigned long *s2 = (unsigned long *)src2;

		while (size > 0) {
			*d++ = *s1++ ^ *s2++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, src1, src2, size);
	}
}

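/*
 * Illustrative sketch (hypothetical buffers): crypto_xor() XORs in place
 * into @dst, while crypto_xor_cpy() writes to a separate destination.
 * With a constant, word-multiple size such as AES_BLOCK_SIZE, the
 * word-at-a-time fast path above may apply:
 *
 *	crypto_xor(block, iv, AES_BLOCK_SIZE);
 *	crypto_xor_cpy(dst, src, keystream, AES_BLOCK_SIZE);
 */
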
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

struct crypto_cipher_spawn {
	struct crypto_spawn base;
};

static inline int crypto_grab_cipher(struct crypto_cipher_spawn *spawn,
				     struct crypto_instance *inst,
				     const char *name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}

static inline void crypto_drop_cipher(struct crypto_cipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct crypto_alg *crypto_spawn_cipher_alg(
	struct crypto_cipher_spawn *spawn)
{
	return spawn->base.alg;
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_cipher_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(&spawn->base, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

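/*
 * Illustrative sketch of the cipher spawn lifecycle (hypothetical names):
 * a template's ->create() grabs the underlying cipher into the instance
 * context; the transform init hook then instantiates it, and teardown
 * paths drop it again.
 *
 *	struct my_inst_ctx {
 *		struct crypto_cipher_spawn spawn;
 *	};
 *
 *	In ->create():
 *		err = crypto_grab_cipher(&ctx->spawn, inst,
 *					 crypto_attr_alg_name(tb[1]), 0, mask);
 *	In the transform's init hook:
 *		cipher = crypto_spawn_cipher(&ctx->spawn);
 *	In ->free():
 *		crypto_drop_cipher(&ctx->spawn);
 */
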
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

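/*
 * Illustrative sketch of driver-side queue draining (hypothetical device
 * structure and locking): any backlogged request is notified with
 * -EINPROGRESS before the next request is processed.
 *
 *	static void my_drain_queue(struct my_dev *dev)
 *	{
 *		struct crypto_async_request *req, *backlog;
 *
 *		spin_lock_bh(&dev->lock);
 *		backlog = crypto_get_backlog(&dev->queue);
 *		req = crypto_dequeue_request(&dev->queue);
 *		spin_unlock_bh(&dev->lock);
 *
 *		if (backlog)
 *			backlog->complete(backlog, -EINPROGRESS);
 *		if (req)
 *			my_process_request(dev, req);
 *	}
 */
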
static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}

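/*
 * Illustrative sketch (hypothetical template ->create()): the mask derived
 * from the user's attributes is passed to crypto_grab_*(), and the inherited
 * flags of the inner algorithm are then copied onto the instance:
 *
 *	u32 mask;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *	...
 *	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS;
 */
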
noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}

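/*
 * Illustrative sketch (hypothetical buffers): use crypto_memneq() instead
 * of memcmp() whenever the comparison involves secrets, e.g. verifying an
 * authentication tag:
 *
 *	if (crypto_memneq(computed_tag, received_tag, taglen))
 *		return -EBADMSG;
 */
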
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

#endif /* _CRYPTO_ALGAPI_H */