/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/skbuff.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * ciphers and architectures.
 */
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15

struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

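/*
 * A crypto_type describes a tfm frontend (for example the legacy
 * blkcipher and ablkcipher types declared below): how much space its
 * tfm and context need, how a tfm of that type is initialised, and how
 * the algorithm is shown in /proc/crypto and reported over netlink.
 */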
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

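/*
 * An algorithm instantiated from a template.  The embedded crypto_alg
 * is what gets registered with the crypto core; __ctx carries the
 * instance's private state, typically the spawns of the algorithms it
 * is built on.
 */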
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

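/*
 * A template such as "cbc" or "hmac" constructs new algorithm
 * instances out of existing ones, e.g. "cbc(aes)".  New templates
 * implement create(); alloc()/free() are the older interface.
 */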
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

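/*
 * A spawn is an instance's reference to an underlying algorithm.  It
 * records the dependency so that the crypto core can tear the instance
 * down if the underlying algorithm goes away, and it is what tfms are
 * allocated from via crypto_spawn_tfm().
 */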
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

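/*
 * A simple request queue with a maximum length (max_qlen) and a
 * backlog pointer marking the first request queued beyond that limit;
 * used by drivers that serialise asynchronous requests.
 */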
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

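/*
 * Illustrative sketch of how a template's create() typically uses the
 * spawn API (identifiers such as spawn, algname, type, mask and alg
 * are placeholders; error handling is abbreviated):
 *
 *	err = crypto_grab_spawn(spawn, algname, type, mask);
 *	if (err)
 *		goto err_free_inst;
 *	err = crypto_inst_setname(inst, tmpl->name, alg);
 *	...
 *	crypto_drop_spawn(spawn);	// on failure or teardown
 */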
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

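/*
 * crypto_enqueue_request() returns -EINPROGRESS when the request has
 * been queued, -EBUSY when it was placed on the backlog (queue full
 * but CRYPTO_TFM_REQ_MAY_BACKLOG set), and -ENOSPC when the queue was
 * full and backlogging was not requested; see crypto/algapi.c.
 */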
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

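/*
 * crypto_inc() increments a big-endian integer of @size bytes, as used
 * e.g. for CTR mode counter blocks.  __crypto_xor() is the out-of-line
 * fallback behind crypto_xor() and crypto_xor_cpy() below.
 */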
void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		const unsigned long *s = (const unsigned long *)src;

		while (size > 0) {
			*d++ ^= *s++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, dst, src, size);
	}
}

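/*
 * Example use of crypto_xor() (illustrative only, buffer names made
 * up): XOR a keystream block into a data block in place, as a
 * stream-cipher-style mode might do.  With a constant, word-multiple
 * size the inline fast path above can be taken.
 *
 *	u8 block[MAX_CIPHER_BLOCKSIZE];
 *	u8 keystream[MAX_CIPHER_BLOCKSIZE];
 *
 *	crypto_xor(block, keystream, MAX_CIPHER_BLOCKSIZE);
 */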
static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
				  unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		const unsigned long *s1 = (const unsigned long *)src1;
		const unsigned long *s2 = (const unsigned long *)src2;

		while (size > 0) {
			*d++ = *s1++ ^ *s2++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, src1, src2, size);
	}
}

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);

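/*
 * Typical shape of a legacy blkcipher encrypt() using the walk API
 * (sketch only; "bsize" stands for the cipher block size and error
 * handling is abbreviated):
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while (walk.nbytes) {
 *		// process walk.src.virt.addr into walk.dst.virt.addr,
 *		// a multiple of bsize bytes at a time
 *		err = blkcipher_walk_done(desc, &walk,
 *					  walk.nbytes % bsize);
 *	}
 *	return err;
 */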
int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

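/*
 * Like crypto_tfm_ctx(), but with the context pointer rounded up to
 * the algorithm's alignmask + 1, for implementations that need more
 * alignment than CRYPTO_MINALIGN provides.
 */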
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

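/*
 * Return the oldest backlogged request in @queue, or NULL if nothing
 * is backlogged.
 */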
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

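/*
 * crypto_requires_off() returns the bits of @off that the user's
 * (@type, @mask) pair constrains to be clear, i.e. flags that the
 * selected algorithm must not have set.
 */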
static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
{
	return (type ^ off) & mask & off;
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of both areas, in bytes
 *
 * Returns 0 when the data is equal, 1 otherwise.  Unlike memcmp(), the
 * time taken does not depend on where the first difference occurs,
 * which makes it suitable for comparing authentication tags.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}

static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
#endif
}

#endif	/* _CRYPTO_ALGAPI_H */