blob: e11d88e6b845018e8bf25e77b02d5149542e68af [file] [log] [blame]
/**
 * Constant-time functions
 *
 * Copyright The Mbed TLS Contributors
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19
/*
 * The following functions are implemented without using comparison operators, as those
 * might be translated to branches by some compilers on some platforms.
 */
24
Dave Rodgman40a41d02023-05-17 11:59:56 +010025#include <limits.h>
26
gabor-mezei-armd1125342021-07-12 16:31:22 +020027#include "common.h"
Gabor Mezei22c9a6f2021-10-20 12:09:35 +020028#include "constant_time_internal.h"
Gabor Mezei765862c2021-10-19 12:22:25 +020029#include "mbedtls/constant_time.h"
gabor-mezei-arm1349ffd2021-09-27 14:28:31 +020030#include "mbedtls/error.h"
gabor-mezei-arm5b3a32d2021-09-29 10:50:31 +020031#include "mbedtls/platform_util.h"
gabor-mezei-armdb9a38c2021-09-27 11:28:54 +020032
gabor-mezei-arm1349ffd2021-09-27 14:28:31 +020033#if defined(MBEDTLS_SSL_TLS_C)
34#include "ssl_misc.h"
35#endif
36
gabor-mezei-arm5b3a32d2021-09-29 10:50:31 +020037#if defined(MBEDTLS_RSA_C)
38#include "mbedtls/rsa.h"
39#endif
40
gabor-mezei-armfdb71182021-09-27 16:11:12 +020041#include <string.h>
Andrzej Kurek8a045ce2022-12-23 11:00:06 -050042#if defined(MBEDTLS_USE_PSA_CRYPTO)
43#define PSA_TO_MBEDTLS_ERR(status) PSA_TO_MBEDTLS_ERR_LIST(status, \
44 psa_to_ssl_errors, \
45 psa_generic_status_to_mbedtls)
46#endif
gabor-mezei-arm3f90fd52021-09-27 12:55:33 +020047
/*
 * Define MBEDTLS_EFFICIENT_UNALIGNED_VOLATILE_ACCESS where assembly is present to
 * perform fast unaligned access to volatile data.
 *
 * This is needed because mbedtls_get_unaligned_uintXX etc don't support volatile
 * memory accesses.
 *
 * Some of these definitions could be moved into alignment.h but for now they are
 * only used here.
 */
Dave Rodgman40a41d02023-05-17 11:59:56 +010058#if defined(MBEDTLS_EFFICIENT_UNALIGNED_ACCESS) && \
59 (defined(MBEDTLS_CT_ARM_ASM) || defined(MBEDTLS_CT_AARCH64_ASM))
Dave Rodgman36dfc5a2022-12-22 15:04:43 +000060
Dave Rodgman40a41d02023-05-17 11:59:56 +010061#define MBEDTLS_EFFICIENT_UNALIGNED_VOLATILE_ACCESS
62
/* Read a 32-bit word from a possibly-unaligned `volatile` buffer.
 *
 * \param p  Pointer to the (possibly unaligned) bytes to read; must point
 *           to at least 4 readable bytes.
 *
 * \return   The 32-bit value at \p p, in native byte order.
 *
 * Only compiled when one of the MBEDTLS_CT_*_ASM macros is defined (see the
 * surrounding #if guard); the #error below catches any inconsistent
 * configuration.
 */
static inline uint32_t mbedtls_get_unaligned_volatile_uint32(volatile const unsigned char *p)
{
    /* This is UB, even where it's safe:
     *    return *((volatile uint32_t*)p);
     * so instead the same thing is expressed in assembly below.
     */
    uint32_t r;
#if defined(MBEDTLS_CT_ARM_ASM)
    /* 32-bit Arm: plain LDR performs the (possibly unaligned) load. */
    asm volatile ("ldr %0, [%1]" : "=r" (r) : "r" (p) :);
#elif defined(MBEDTLS_CT_AARCH64_ASM)
    /* AArch64: %w0 selects the 32-bit view of the destination register. */
    asm volatile ("ldr %w0, [%1]" : "=r" (r) : "r" (p) :);
#else
#error No assembly defined for mbedtls_get_unaligned_volatile_uint32
#endif
    return r;
}
Dave Rodgman40a41d02023-05-17 11:59:56 +010079#endif /* defined(MBEDTLS_EFFICIENT_UNALIGNED_ACCESS) &&
80 (defined(MBEDTLS_CT_ARM_ASM) || defined(MBEDTLS_CT_AARCH64_ASM)) */
Dave Rodgman36dfc5a2022-12-22 15:04:43 +000081
/* Constant-time buffer comparison.
 *
 * \param a  First buffer (read in full, never short-circuited).
 * \param b  Second buffer (read in full, never short-circuited).
 * \param n  Number of bytes to compare.
 *
 * \return   0 if the \p n bytes of \p a and \p b are identical, non-zero
 *           otherwise. The non-zero value is the OR of all byte XOR
 *           differences cast to int; callers should only test for zero.
 *
 * Unlike memcmp(), the execution time depends only on \p n, not on the
 * buffer contents or the position of the first difference.
 */
int mbedtls_ct_memcmp(const void *a,
                      const void *b,
                      size_t n)
{
    size_t i = 0;
    /*
     * `A` and `B` are cast to volatile to ensure that the compiler
     * generates code that always fully reads both buffers.
     * Otherwise it could generate a test to exit early if `diff` has all
     * bits set early in the loop.
     */
    volatile const unsigned char *A = (volatile const unsigned char *) a;
    volatile const unsigned char *B = (volatile const unsigned char *) b;
    uint32_t diff = 0;

#if defined(MBEDTLS_EFFICIENT_UNALIGNED_VOLATILE_ACCESS)
    /* Fast path: compare 4 bytes per iteration while at least 4 remain. */
    for (; (i + 4) <= n; i += 4) {
        uint32_t x = mbedtls_get_unaligned_volatile_uint32(A + i);
        uint32_t y = mbedtls_get_unaligned_volatile_uint32(B + i);
        diff |= x ^ y;
    }
#endif

    /* Tail (or whole buffer when no fast path): one byte at a time. */
    for (; i < n; i++) {
        /* Read volatile data in order before computing diff.
         * This avoids IAR compiler warning:
         * 'the order of volatile accesses is undefined ..' */
        unsigned char x = A[i], y = B[i];
        diff |= x ^ y;
    }

    return (int) diff;
}
115
Gabor Mezeie2123792021-10-18 17:05:06 +0200116#if defined(MBEDTLS_PKCS1_V15) && defined(MBEDTLS_RSA_C) && !defined(MBEDTLS_RSA_ALT)
117
/* Shift a buffer left by a secret amount, in constant time.
 *
 * Removes the first \p offset bytes of the buffer, moving the remaining
 * `total - offset` bytes to the start and zero-filling the freed tail,
 * without the memory access pattern or timing revealing \p offset.
 *
 * \param start   Buffer to shift in place.
 * \param total   Total size of the buffer in bytes.
 * \param offset  Number of leading bytes to remove (secret value;
 *                assumed <= total — TODO confirm against callers).
 *
 * Cost is O(total^2): the outer loop always runs `total` passes and each
 * pass always touches every byte, so timing depends only on `total`.
 */
void mbedtls_ct_memmove_left(void *start, size_t total, size_t offset)
{
    volatile unsigned char *buf = start;
    for (size_t i = 0; i < total; i++) {
        /* no_op is "true" (all-ones mask) for the first total - offset
         * passes, selecting the identity copy below. */
        mbedtls_ct_condition_t no_op = mbedtls_ct_bool_gt(total - offset, i);
        /* The first `total - offset` passes are a no-op. The last
         * `offset` passes shift the data one byte to the left and
         * zero out the last byte. */
        for (size_t n = 0; n < total - 1; n++) {
            unsigned char current = buf[n];
            unsigned char next = buf[n+1];
            /* Branch-free select: keep `current` on no-op passes,
             * otherwise take the next byte (shift left by one). */
            buf[n] = mbedtls_ct_uint_if(no_op, current, next);
        }
        /* Last byte: kept on no-op passes, zeroed on shifting passes. */
        buf[total-1] = mbedtls_ct_uint_if0(no_op, buf[total-1]);
    }
}
gabor-mezei-armdee0fd32021-09-27 13:34:25 +0200134
Gabor Mezeie2123792021-10-18 17:05:06 +0200135#endif /* MBEDTLS_PKCS1_V15 && MBEDTLS_RSA_C && ! MBEDTLS_RSA_ALT */
136
/* Conditional memcpy without branches.
 *
 * Sets `dest[i] = condition ? src1[i] : src2[i]` for all \p len bytes,
 * using only masking so that neither timing nor the access pattern
 * depends on \p condition.
 *
 * \param condition  Selector; assumed to be a canonical
 *                   mbedtls_ct_condition_t (all-ones or all-zeros mask) —
 *                   anything else would blend the two sources.
 * \param dest       Destination buffer (always fully written).
 * \param src1       Source used when \p condition is true.
 * \param src2       Source used when \p condition is false; may be NULL,
 *                   in which case \p dest is left unchanged when the
 *                   condition is false.
 * \param len        Number of bytes to process.
 */
void mbedtls_ct_memcpy_if(mbedtls_ct_condition_t condition,
                          unsigned char *dest,
                          const unsigned char *src1,
                          const unsigned char *src2,
                          size_t len)
{
    /* mask selects src1 bytes; not_mask selects src2 bytes. The opaque
     * wrapper prevents the compiler from seeing through the negation and
     * reintroducing a branch. */
    const uint32_t mask = (uint32_t) condition;
    const uint32_t not_mask = (uint32_t) ~mbedtls_ct_compiler_opaque(condition);

    /* If src2 is NULL and condition == 0, then this function has no effect.
     * In this case, copy from dest back into dest. */
    if (src2 == NULL) {
        src2 = dest;
    }

    /* dest[i] = c1 == c2 ? src[i] : dest[i] */
    size_t i = 0;
#if defined(MBEDTLS_EFFICIENT_UNALIGNED_ACCESS)
    /* Fast path: blend 4 bytes per iteration. */
    for (; (i + 4) <= len; i += 4) {
        uint32_t a = mbedtls_get_unaligned_uint32(src1 + i) & mask;
        uint32_t b = mbedtls_get_unaligned_uint32(src2 + i) & not_mask;
        mbedtls_put_unaligned_uint32(dest + i, a | b);
    }
#endif /* MBEDTLS_EFFICIENT_UNALIGNED_ACCESS */
    /* Tail (or whole buffer): byte-wise masked blend. */
    for (; i < len; i++) {
        dest[i] = (src1[i] & mask) | (src2[i] & not_mask);
    }
}
165
Przemek Stekiel89ad6232022-09-27 13:36:12 +0200166#if defined(MBEDTLS_SSL_SOME_SUITES_USE_MAC)
Gabor Mezeie2123792021-10-18 17:05:06 +0200167
/* Copy \p len bytes from a secret offset within \p src into \p dest,
 * in constant time.
 *
 * \param dest        Destination buffer.
 * \param src         Source buffer; must hold at least
 *                    `offset_max + len` readable bytes.
 * \param offset      Actual (secret) offset to copy from; assumed to lie
 *                    in [offset_min, offset_max].
 * \param offset_min  Smallest possible value of \p offset (public).
 * \param offset_max  Largest possible value of \p offset (public).
 * \param len         Number of bytes to copy.
 *
 * Every candidate offset in the public range is visited and a masked copy
 * is performed for each, so timing depends only on the public bounds,
 * never on the secret \p offset itself.
 */
void mbedtls_ct_memcpy_offset(unsigned char *dest,
                              const unsigned char *src,
                              size_t offset,
                              size_t offset_min,
                              size_t offset_max,
                              size_t len)
{
    size_t offsetval;

    for (offsetval = offset_min; offsetval <= offset_max; offsetval++) {
        /* Only the pass where offsetval == offset actually writes src data;
         * all other passes copy dest onto itself (src2 == NULL case). */
        mbedtls_ct_memcpy_if(mbedtls_ct_bool_eq(offsetval, offset), dest, src + offsetval, NULL,
                             len);
    }
}
gabor-mezei-arm1349ffd2021-09-27 14:28:31 +0200182
Przemek Stekiel89ad6232022-09-27 13:36:12 +0200183#endif /* MBEDTLS_SSL_SOME_SUITES_USE_MAC */
gabor-mezei-arm40a49252021-09-27 15:33:35 +0200184
Dave Rodgmandebf8672023-05-17 12:12:44 +0100185#if defined(MBEDTLS_PKCS1_V15) && defined(MBEDTLS_RSA_C) && !defined(MBEDTLS_RSA_ALT)
186
/* Conditionally zeroize a buffer, without branching on the condition.
 *
 * \param condition  If true (all-ones mask), all \p len bytes of \p buf
 *                   are set to zero; if false (zero mask), the buffer is
 *                   rewritten with its own contents (left unchanged).
 * \param buf        Buffer to conditionally clear.
 * \param len        Size of \p buf in bytes.
 *
 * The buffer is fully read and written in both cases, so timing and the
 * memory access pattern do not reveal \p condition.
 */
void mbedtls_ct_zeroize_if(mbedtls_ct_condition_t condition, void *buf, size_t len)
{
    /* mask is all-zeros when zeroizing, all-ones when preserving. */
    uint32_t mask = (uint32_t) ~condition;
    uint8_t *p = (uint8_t *) buf;
    size_t i = 0;
#if defined(MBEDTLS_EFFICIENT_UNALIGNED_ACCESS)
    /* Fast path: mask 4 bytes per iteration. */
    for (; (i + 4) <= len; i += 4) {
        mbedtls_put_unaligned_uint32((void *) (p + i),
                                     mbedtls_get_unaligned_uint32((void *) (p + i)) & mask);
    }
#endif
    /* Tail (or whole buffer): byte-wise masking. */
    for (; i < len; i++) {
        p[i] = p[i] & mask;
    }
}
202
203#endif /* defined(MBEDTLS_PKCS1_V15) && defined(MBEDTLS_RSA_C) && !defined(MBEDTLS_RSA_ALT) */