/*
 * Poly1305 authenticator algorithm, RFC7539, SIMD glue code
 *
 * Copyright (C) 2015 Martin Willi
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/fpu/api.h>
#include <asm/simd.h>

struct poly1305_simd_desc_ctx {
	struct poly1305_desc_ctx base;
	/* derived key u set? */
	bool uset;
#ifdef CONFIG_AS_AVX2
	/* derived keys r^3, r^4 set? */
	bool wset;
#endif
	/* derived Poly1305 key r^2 */
	u32 u[5];
	/* ... silently appended r^3 and r^4 when using AVX2 */
};

asmlinkage void poly1305_block_sse2(u32 *h, const u8 *src,
				    const u32 *r, unsigned int blocks);
asmlinkage void poly1305_2block_sse2(u32 *h, const u8 *src, const u32 *r,
				     unsigned int blocks, const u32 *u);
#ifdef CONFIG_AS_AVX2
asmlinkage void poly1305_4block_avx2(u32 *h, const u8 *src, const u32 *r,
				     unsigned int blocks, const u32 *u);
static bool poly1305_use_avx2;
#endif

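/*
 * Initialize the descriptor; the derived key powers are computed lazily
 * by poly1305_simd_blocks() on first use.
 */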
static int poly1305_simd_init(struct shash_desc *desc)
{
	struct poly1305_simd_desc_ctx *sctx = shash_desc_ctx(desc);

	sctx->uset = false;
#ifdef CONFIG_AS_AVX2
	sctx->wset = false;
#endif

	return crypto_poly1305_init(desc);
}

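/*
 * Compute a = a * b modulo 2^130 - 5 by running the single-block SSE2
 * routine on an all-zero message block.  Used to derive the key powers
 * r^2, r^3 and r^4 for the multi-block functions.
 */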
static void poly1305_simd_mult(u32 *a, const u32 *b)
{
	u8 m[POLY1305_BLOCK_SIZE];

	memset(m, 0, sizeof(m));
	/* The poly1305 block function adds a hi-bit to the accumulator which
	 * we don't need for key multiplication; compensate for it. */
	a[4] -= 1 << 24;
	poly1305_block_sse2(a, m, b, 1);
}

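/*
 * Process as many whole blocks as possible, preferring the widest SIMD
 * path: 4-block strides with AVX2 (using r..r^4), then 2-block strides
 * with SSE2 (using r and r^2), then a single trailing block.  The key
 * powers are derived lazily the first time each path is taken.  Returns
 * the number of bytes left over (always less than one block).
 */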
static unsigned int poly1305_simd_blocks(struct poly1305_desc_ctx *dctx,
					 const u8 *src, unsigned int srclen)
{
	struct poly1305_simd_desc_ctx *sctx;
	unsigned int blocks, datalen;

	BUILD_BUG_ON(offsetof(struct poly1305_simd_desc_ctx, base));
	sctx = container_of(dctx, struct poly1305_simd_desc_ctx, base);

	if (unlikely(!dctx->sset)) {
		datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
		src += srclen - datalen;
		srclen = datalen;
	}

#ifdef CONFIG_AS_AVX2
	if (poly1305_use_avx2 && srclen >= POLY1305_BLOCK_SIZE * 4) {
		if (unlikely(!sctx->wset)) {
			if (!sctx->uset) {
				memcpy(sctx->u, dctx->r, sizeof(sctx->u));
				poly1305_simd_mult(sctx->u, dctx->r);
				sctx->uset = true;
			}
			memcpy(sctx->u + 5, sctx->u, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u + 5, dctx->r);
			memcpy(sctx->u + 10, sctx->u + 5, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u + 10, dctx->r);
			sctx->wset = true;
		}
		blocks = srclen / (POLY1305_BLOCK_SIZE * 4);
		poly1305_4block_avx2(dctx->h, src, dctx->r, blocks, sctx->u);
		src += POLY1305_BLOCK_SIZE * 4 * blocks;
		srclen -= POLY1305_BLOCK_SIZE * 4 * blocks;
	}
#endif
	if (likely(srclen >= POLY1305_BLOCK_SIZE * 2)) {
		if (unlikely(!sctx->uset)) {
			memcpy(sctx->u, dctx->r, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u, dctx->r);
			sctx->uset = true;
		}
		blocks = srclen / (POLY1305_BLOCK_SIZE * 2);
		poly1305_2block_sse2(dctx->h, src, dctx->r, blocks, sctx->u);
		src += POLY1305_BLOCK_SIZE * 2 * blocks;
		srclen -= POLY1305_BLOCK_SIZE * 2 * blocks;
	}
	if (srclen >= POLY1305_BLOCK_SIZE) {
		poly1305_block_sse2(dctx->h, src, dctx->r, 1);
		srclen -= POLY1305_BLOCK_SIZE;
	}
	return srclen;
}

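/*
 * Complete any partially filled block left over from a previous call, feed
 * whole blocks to the SIMD routines under kernel_fpu_begin()/kernel_fpu_end(),
 * and buffer the trailing partial block for the next update.
 */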
static int poly1305_simd_update(struct shash_desc *desc,
				const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	unsigned int bytes;

	/* kernel_fpu_begin/end is costly, use fallback for small updates */
	if (srclen <= 288 || !may_use_simd())
		return crypto_poly1305_update(desc, src, srclen);

	kernel_fpu_begin();

	if (unlikely(dctx->buflen)) {
		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		srclen -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			poly1305_simd_blocks(dctx, dctx->buf,
					     POLY1305_BLOCK_SIZE);
			dctx->buflen = 0;
		}
	}

	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		bytes = poly1305_simd_blocks(dctx, src, srclen);
		src += srclen - bytes;
		srclen = bytes;
	}

	kernel_fpu_end();

	if (unlikely(srclen)) {
		dctx->buflen = srclen;
		memcpy(dctx->buf, src, srclen);
	}

	return 0;
}

static struct shash_alg alg = {
	.digestsize	= POLY1305_DIGEST_SIZE,
	.init		= poly1305_simd_init,
	.update		= poly1305_simd_update,
	.final		= crypto_poly1305_final,
	.descsize	= sizeof(struct poly1305_simd_desc_ctx),
	.base		= {
		.cra_name		= "poly1305",
		.cra_driver_name	= "poly1305-simd",
		.cra_priority		= 300,
		.cra_blocksize		= POLY1305_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
};

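/*
 * SSE2 is required for this driver to load.  The AVX2 path is enabled only
 * when the CPU has AVX and AVX2 and the kernel can save the SSE/YMM xstate;
 * in that case the descriptor is enlarged to hold the extra key powers
 * r^3 and r^4 (ten additional u32 limbs).
 */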
static int __init poly1305_simd_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_XMM2))
		return -ENODEV;

#ifdef CONFIG_AS_AVX2
	poly1305_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
			    boot_cpu_has(X86_FEATURE_AVX2) &&
			    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
	alg.descsize = sizeof(struct poly1305_simd_desc_ctx);
	if (poly1305_use_avx2)
		alg.descsize += 10 * sizeof(u32);
#endif
	return crypto_register_shash(&alg);
}

static void __exit poly1305_simd_mod_exit(void)
{
	crypto_unregister_shash(&alg);
}

module_init(poly1305_simd_mod_init);
module_exit(poly1305_simd_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-simd");