/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AMD Cryptographic Coprocessor (CCP) crypto API support
 *
 * Copyright (C) 2013,2017 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 */

#ifndef __CCP_CRYPTO_H__
#define __CCP_CRYPTO_H__

#include <linux/list.h>
#include <linux/wait.h>
#include <linux/ccp.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/aead.h>
#include <crypto/aead.h>
#include <crypto/ctr.h>
#include <crypto/hash.h>
#include <crypto/sha.h>
#include <crypto/akcipher.h>
#include <crypto/skcipher.h>
#include <crypto/internal/rsa.h>

/* We want the module name in front of our messages */
#undef pr_fmt
#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#define	CCP_LOG_LEVEL	KERN_INFO

#define CCP_CRA_PRIORITY	300

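/* Per-algorithm registration wrappers: each pairs a generic crypto API
 * algorithm definition with the CCP mode it maps to, plus a list_head so
 * the driver can keep track of everything it has registered.
 */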
struct ccp_crypto_skcipher_alg {
	struct list_head entry;

	u32 mode;

	struct skcipher_alg alg;
};

struct ccp_crypto_aead {
	struct list_head entry;

	u32 mode;

	struct aead_alg alg;
};

struct ccp_crypto_ahash_alg {
	struct list_head entry;

	const __be32 *init;
	u32 type;
	u32 mode;

	/* Child algorithm used for HMAC, CMAC, etc */
	char child_alg[CRYPTO_MAX_ALG_NAME];

	struct ahash_alg alg;
};

struct ccp_crypto_akcipher_alg {
	struct list_head entry;

	struct akcipher_alg alg;
};

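/* Helpers that walk back from a crypto API transform to the driver's
 * wrapper structure via container_of() on the embedded *_alg member.
 */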
static inline struct ccp_crypto_skcipher_alg *
	ccp_crypto_skcipher_alg(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	return container_of(alg, struct ccp_crypto_skcipher_alg, alg);
}

static inline struct ccp_crypto_ahash_alg *
	ccp_crypto_ahash_alg(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct ahash_alg *ahash_alg;

	ahash_alg = container_of(alg, struct ahash_alg, halg.base);

	return container_of(ahash_alg, struct ccp_crypto_ahash_alg, alg);
}

/***** AES related defines *****/
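/* Per-transform (tfm) AES context.  The key buffer is sized for two full
 * AES keys so it can hold the concatenated key pair used by XTS mode;
 * k1/k2 hold the derived CMAC subkeys.
 */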
struct ccp_aes_ctx {
	/* Fallback cipher for XTS with unsupported unit sizes */
	struct crypto_skcipher *tfm_skcipher;

	enum ccp_engine engine;
	enum ccp_aes_type type;
	enum ccp_aes_mode mode;

	struct scatterlist key_sg;
	unsigned int key_len;
	u8 key[AES_MAX_KEY_SIZE * 2];

	u8 nonce[CTR_RFC3686_NONCE_SIZE];

	/* CMAC key structures */
	struct scatterlist k1_sg;
	struct scatterlist k2_sg;
	unsigned int kn_len;
	u8 k1[AES_BLOCK_SIZE];
	u8 k2[AES_BLOCK_SIZE];
};

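/* Per-request AES state.  The fallback skcipher request must remain the
 * last member: the fallback transform's own request context is laid out
 * directly behind it in the same allocation.
 */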
struct ccp_aes_req_ctx {
	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];

	struct scatterlist tag_sg;
	u8 tag[AES_BLOCK_SIZE];

	/* Fields used for RFC3686 requests */
	u8 *rfc3686_info;
	u8 rfc3686_iv[AES_BLOCK_SIZE];

	struct ccp_cmd cmd;

	struct skcipher_request fallback_req;	// keep at the end
};

struct ccp_aes_cmac_req_ctx {
	unsigned int null_msg;
	unsigned int final;

	struct scatterlist *src;
	unsigned int nbytes;

	u64 hash_cnt;
	unsigned int hash_rem;

	struct sg_table data_sg;

	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];

	struct scatterlist buf_sg;
	unsigned int buf_count;
	u8 buf[AES_BLOCK_SIZE];

	struct scatterlist pad_sg;
	unsigned int pad_count;
	u8 pad[AES_BLOCK_SIZE];

	struct ccp_cmd cmd;
};

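/* Partial-hash state saved and restored by the ahash export()/import()
 * callbacks.
 */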
struct ccp_aes_cmac_exp_ctx {
	unsigned int null_msg;

	u8 iv[AES_BLOCK_SIZE];

	unsigned int buf_count;
	u8 buf[AES_BLOCK_SIZE];
};

/***** 3DES related defines *****/
struct ccp_des3_ctx {
	enum ccp_engine engine;
	enum ccp_des3_type type;
	enum ccp_des3_mode mode;

	struct scatterlist key_sg;
	unsigned int key_len;
	u8 key[AES_MAX_KEY_SIZE];
};

struct ccp_des3_req_ctx {
	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];

	struct ccp_cmd cmd;
};

/* SHA-related defines
 * These values must be large enough to accommodate any variant
 */
#define MAX_SHA_CONTEXT_SIZE	SHA512_DIGEST_SIZE
#define MAX_SHA_BLOCK_SIZE	SHA512_BLOCK_SIZE

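/* Per-transform SHA context.  For HMAC the block-sized key and the
 * derived ipad/opad values are kept here; hmac_tfm is a software shash
 * of the underlying digest used for key preparation.
 */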
struct ccp_sha_ctx {
	struct scatterlist opad_sg;
	unsigned int opad_count;

	unsigned int key_len;
	u8 key[MAX_SHA_BLOCK_SIZE];
	u8 ipad[MAX_SHA_BLOCK_SIZE];
	u8 opad[MAX_SHA_BLOCK_SIZE];
	struct crypto_shash *hmac_tfm;
};

struct ccp_sha_req_ctx {
	enum ccp_sha_type type;

	u64 msg_bits;

	unsigned int first;
	unsigned int final;

	struct scatterlist *src;
	unsigned int nbytes;

	u64 hash_cnt;
	unsigned int hash_rem;

	struct sg_table data_sg;

	struct scatterlist ctx_sg;
	u8 ctx[MAX_SHA_CONTEXT_SIZE];

	struct scatterlist buf_sg;
	unsigned int buf_count;
	u8 buf[MAX_SHA_BLOCK_SIZE];

	/* CCP driver command */
	struct ccp_cmd cmd;
};

struct ccp_sha_exp_ctx {
	enum ccp_sha_type type;

	u64 msg_bits;

	unsigned int first;

	u8 ctx[MAX_SHA_CONTEXT_SIZE];

	unsigned int buf_count;
	u8 buf[MAX_SHA_BLOCK_SIZE];
};

/***** RSA related defines *****/

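/* RSA key material: the public exponent (e), modulus (n) and private
 * exponent (d) are copied into driver-owned buffers, each with a
 * matching scatterlist for submission to the CCP.
 */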
struct ccp_rsa_ctx {
	unsigned int key_len; /* in bits */
	struct scatterlist e_sg;
	u8 *e_buf;
	unsigned int e_len;
	struct scatterlist n_sg;
	u8 *n_buf;
	unsigned int n_len;
	struct scatterlist d_sg;
	u8 *d_buf;
	unsigned int d_len;
};

struct ccp_rsa_req_ctx {
	struct ccp_cmd cmd;
};

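/* Maximum RSA modulus size in bytes: 4096-bit keys on the original CCP,
 * 16384-bit keys on CCP version 5 devices.
 */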
#define	CCP_RSA_MAXMOD	(4 * 1024 / 8)
#define	CCP5_RSA_MAXMOD	(16 * 1024 / 8)

/***** Common Context Structure *****/
struct ccp_ctx {
	int (*complete)(struct crypto_async_request *req, int ret);

	union {
		struct ccp_aes_ctx aes;
		struct ccp_rsa_ctx rsa;
		struct ccp_sha_ctx sha;
		struct ccp_des3_ctx des3;
	} u;
};

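/* Request submission and scatterlist helpers, plus the per-algorithm
 * registration entry points; each ccp_register_*() call adds the
 * wrappers it registers to the supplied list head.
 */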
int ccp_crypto_enqueue_request(struct crypto_async_request *req,
			       struct ccp_cmd *cmd);
struct scatterlist *ccp_crypto_sg_table_add(struct sg_table *table,
					    struct scatterlist *sg_add);

int ccp_register_aes_algs(struct list_head *head);
int ccp_register_aes_cmac_algs(struct list_head *head);
int ccp_register_aes_xts_algs(struct list_head *head);
int ccp_register_aes_aeads(struct list_head *head);
int ccp_register_sha_algs(struct list_head *head);
int ccp_register_des3_algs(struct list_head *head);
int ccp_register_rsa_algs(struct list_head *head);

#endif