// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <linux/io.h>
#include <linux/delay.h>

#include "geode-aes.h"

/* Static structures */

static void __iomem *_iobase;
static spinlock_t lock;

/* Write a 128 bit field (either a writable key or IV) */
static inline void
_writefield(u32 offset, const void *value)
{
	int i;

	for (i = 0; i < 4; i++)
		iowrite32(((const u32 *) value)[i], _iobase + offset + (i * 4));
}

/* Read a 128 bit field (either a writable key or IV) */
static inline void
_readfield(u32 offset, void *value)
{
	int i;

	for (i = 0; i < 4; i++)
		((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
}

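/*
 * Program a single transfer into the engine and busy-poll for
 * completion (up to AES_OP_TIMEOUT iterations).  The hardware is given
 * physical addresses, so src and dst must lie in the kernel direct
 * mapping (virt_to_phys).  Returns 0 on success, 1 on timeout.
 */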
static int
do_crypt(const void *src, void *dst, u32 len, u32 flags)
{
	u32 status;
	u32 counter = AES_OP_TIMEOUT;

	iowrite32(virt_to_phys((void *)src), _iobase + AES_SOURCEA_REG);
	iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
	iowrite32(len, _iobase + AES_LENA_REG);

	/* Start the operation */
	iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

	do {
		status = ioread32(_iobase + AES_INTR_REG);
		cpu_relax();
	} while (!(status & AES_INTRA_PENDING) && --counter);

	/* Clear the event */
	iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
	return counter ? 0 : 1;
}

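/*
 * Run one encryption or decryption pass under the global hardware
 * lock: load the IV (CBC mode only) and the 128-bit key, kick off
 * do_crypt(), then read the updated IV back out for chaining.
 */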
static void
geode_aes_crypt(const struct geode_aes_tfm_ctx *tctx, const void *src,
		void *dst, u32 len, u8 *iv, int mode, int dir)
{
	u32 flags = 0;
	unsigned long iflags;
	int ret;

	/* If the source and destination are the same, then
	 * we need to turn on the coherent flags, otherwise
	 * we don't need to worry.
	 */

	flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

	if (dir == AES_DIR_ENCRYPT)
		flags |= AES_CTRL_ENCRYPT;

	/* Start the critical section */

	spin_lock_irqsave(&lock, iflags);

	if (mode == AES_MODE_CBC) {
		flags |= AES_CTRL_CBC;
		_writefield(AES_WRITEIV0_REG, iv);
	}

	flags |= AES_CTRL_WRKEY;
	_writefield(AES_WRITEKEY0_REG, tctx->key);

	ret = do_crypt(src, dst, len, flags);
	BUG_ON(ret);

	if (mode == AES_MODE_CBC)
		_readfield(AES_WRITEIV0_REG, iv);

	spin_unlock_irqrestore(&lock, iflags);
}

/* CRYPTO-API Functions */

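/*
 * The engine only implements AES-128.  192- and 256-bit keys are still
 * accepted so the algorithms can advertise the full AES key range, but
 * they are routed to a software fallback transform.
 */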
static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int len)
{
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	tctx->keylen = len;

	if (len == AES_KEYSIZE_128) {
		memcpy(tctx->key, key, len);
		return 0;
	}

	if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
		/* not supported at all */
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * The requested key size is not supported by HW, do a fallback
	 */
	tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	tctx->fallback.cip->base.crt_flags |=
		(tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(tctx->fallback.cip, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (tctx->fallback.cip->base.crt_flags &
				   CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int geode_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	unsigned int ret;

	tctx->keylen = len;

	if (len == AES_KEYSIZE_128) {
		memcpy(tctx->key, key, len);
		return 0;
	}

	if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
		/* not supported at all */
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/*
	 * The requested key size is not supported by HW, do a fallback
	 */
	crypto_skcipher_clear_flags(tctx->fallback.skcipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->fallback.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	ret = crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);
	crypto_skcipher_set_flags(tfm,
				  crypto_skcipher_get_flags(tctx->fallback.skcipher) &
				  CRYPTO_TFM_RES_MASK);
	return ret;
}

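/*
 * Single-block cipher operations: one AES block through the engine in
 * ECB mode, or through the fallback cipher for non-128-bit keys.
 */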
static void
geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);
		return;
	}

	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
			AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static void
geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);
		return;
	}

	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
			AES_MODE_ECB, AES_DIR_DECRYPT);
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = crypto_tfm_alg_name(tfm);
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	tctx->fallback.cip = crypto_alloc_cipher(name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(tctx->fallback.cip)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(tctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(tctx->fallback.cip);
}

static struct crypto_alg geode_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "geode-aes",
	.cra_priority		= 300,
	.cra_alignmask		= 15,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct geode_aes_tfm_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= geode_setkey_cip,
			.cia_encrypt		= geode_encrypt,
			.cia_decrypt		= geode_decrypt
		}
	}
};

static int geode_init_skcipher(struct crypto_skcipher *tfm)
{
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	tctx->fallback.skcipher =
		crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK |
				      CRYPTO_ALG_ASYNC);
	if (IS_ERR(tctx->fallback.skcipher)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(tctx->fallback.skcipher);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(tctx->fallback.skcipher));
	return 0;
}

static void geode_exit_skcipher(struct crypto_skcipher *tfm)
{
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->fallback.skcipher);
}

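/*
 * Common bulk path for the ecb/cbc skcipher algorithms.  Requests with
 * non-128-bit keys are forwarded wholesale to the fallback skcipher;
 * otherwise the data is walked through the engine in full AES blocks,
 * with any partial tail left for the next iteration of the walk.
 */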
static int geode_skcipher_crypt(struct skcipher_request *req, int mode, int dir)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, tctx->fallback.skcipher);
		if (dir == AES_DIR_DECRYPT)
			return crypto_skcipher_decrypt(subreq);
		else
			return crypto_skcipher_encrypt(subreq);
	}

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,
				round_down(nbytes, AES_BLOCK_SIZE),
				walk.iv, mode, dir);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}

	return err;
}

static int geode_cbc_encrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_ENCRYPT);
}

static int geode_cbc_decrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_DECRYPT);
}

static int geode_ecb_encrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static int geode_ecb_decrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_DECRYPT);
}

static struct skcipher_alg geode_skcipher_algs[] = {
	{
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-geode",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct geode_aes_tfm_ctx),
		.base.cra_alignmask	= 15,
		.base.cra_module	= THIS_MODULE,
		.init			= geode_init_skcipher,
		.exit			= geode_exit_skcipher,
		.setkey			= geode_setkey_skcipher,
		.encrypt		= geode_cbc_encrypt,
		.decrypt		= geode_cbc_decrypt,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	}, {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-geode",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct geode_aes_tfm_ctx),
		.base.cra_alignmask	= 15,
		.base.cra_module	= THIS_MODULE,
		.init			= geode_init_skcipher,
		.exit			= geode_exit_skcipher,
		.setkey			= geode_setkey_skcipher,
		.encrypt		= geode_ecb_encrypt,
		.decrypt		= geode_ecb_decrypt,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
	},
};

static void geode_aes_remove(struct pci_dev *dev)
{
	crypto_unregister_alg(&geode_alg);
	crypto_unregister_skciphers(geode_skcipher_algs,
				    ARRAY_SIZE(geode_skcipher_algs));

	pci_iounmap(dev, _iobase);
	_iobase = NULL;

	pci_release_regions(dev);
	pci_disable_device(dev);
}

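/*
 * Map the AES engine registers from PCI BAR 0, clear any stale
 * interrupt state, and register the cipher and skcipher algorithms,
 * unwinding in reverse order on failure.
 */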
static int geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
	int ret;

	ret = pci_enable_device(dev);
	if (ret)
		return ret;

	ret = pci_request_regions(dev, "geode-aes");
	if (ret)
		goto eenable;

	_iobase = pci_iomap(dev, 0, 0);

	if (_iobase == NULL) {
		ret = -ENOMEM;
		goto erequest;
	}

	spin_lock_init(&lock);

	/* Clear any pending activity */
	iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

	ret = crypto_register_alg(&geode_alg);
	if (ret)
		goto eiomap;

	ret = crypto_register_skciphers(geode_skcipher_algs,
					ARRAY_SIZE(geode_skcipher_algs));
	if (ret)
		goto ealg;

	dev_notice(&dev->dev, "GEODE AES engine enabled.\n");
	return 0;

ealg:
	crypto_unregister_alg(&geode_alg);

eiomap:
	pci_iounmap(dev, _iobase);

erequest:
	pci_release_regions(dev);

eenable:
	pci_disable_device(dev);

	dev_err(&dev->dev, "GEODE AES initialization failed.\n");
	return ret;
}

static struct pci_device_id geode_aes_tbl[] = {
	{ PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_LX_AES), },
	{ 0, }
};

MODULE_DEVICE_TABLE(pci, geode_aes_tbl);

static struct pci_driver geode_aes_driver = {
	.name = "Geode LX AES",
	.id_table = geode_aes_tbl,
	.probe = geode_aes_probe,
	.remove = geode_aes_remove,
};

module_pci_driver(geode_aes_driver);

MODULE_AUTHOR("Advanced Micro Devices, Inc.");
MODULE_DESCRIPTION("Geode LX Hardware AES driver");
MODULE_LICENSE("GPL");