// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	union {
		struct cc_hw_key_info hw;
		struct cc_cpp_key_info cpp;
	};
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

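/*
 * Reject key sizes the engine cannot handle for the configured flow mode.
 * Note that XTS, ESSIV and BITLOCKER modes take a double-sized (two-key)
 * AES key.
 */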
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		goto free_shash;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		goto free_key;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	return 0;

free_key:
	kfree(ctx_p->user.key);
free_shash:
	crypto_free_shash(ctx_p->shash_tfm);

	return -ENOMEM;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
		container_of(alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

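/*
 * Protected key slot numbers are partitioned into ranges: the HW (FDE) key
 * slots come first, followed by the CPP key slots. Any other slot number is
 * invalid.
 */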
static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}

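/*
 * setkey entry point for the protected key (paes/psm4) algorithms. The
 * caller does not pass key material here, only a struct cc_hkey_info token
 * carrying the slot numbers (hw_key1/hw_key2) and the real key length.
 */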
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}

static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if ((keylen == DES3_EDE_KEY_SIZE &&
		     verify_skcipher_des3_key(sktfm, key)) ||
		    verify_skcipher_des_key(sktfm, key)) {
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

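/*
 * Emit a descriptor that writes the updated chaining value (the next IV)
 * back into the request IV buffer after the data pass. Not used for CPP
 * (policy protected key) requests.
 */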
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

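/*
 * State (IV) setup is split in two: cc_setup_state_desc() below loads the
 * IV for the chaining modes ahead of the key descriptor, while the
 * XTS/ESSIV/BITLOCKER tweak is loaded by cc_setup_xex_state_desc(), which
 * runs after the key descriptor in cc_cipher_process().
 */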
static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				    struct cipher_req_ctx *req_ctx,
				    unsigned int ivsize, unsigned int nbytes,
				    struct cc_hw_desc desc[],
				    unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

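/*
 * Program the cipher key: either a DMA load of the user key from the
 * context buffer, a reference to a HW (FDE) key slot, or a reference to a
 * CPP key slot, depending on the key type.
 */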
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/*des*/
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

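/*
 * If the request is backed by an MLLI (scatter) table, copy the table from
 * host memory into the engine SRAM over the BYPASS flow before the data
 * pass references it.
 */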
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}

static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kzfree(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}

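/*
 * Build and submit the full descriptor sequence for one request: state
 * (IV), MLLI table, key, XEX state, data flow, and IV read-back.
 */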
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV) */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key) */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY)
		kzfree(req_ctx->iv);

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher algorithm templates, filtered at registration time by HW
 * revision, standards body and security setting.
 */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "cbc(psm4)",
		.driver_name = "cbc-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
	{
		.name = "ctr(psm4)",
		.driver_name = "ctr-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
};

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &cipher_handle->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}