/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
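
/*
 * A minimal caller-side sketch of this ABI (simplified from the
 * get_user() macro in <asm/uaccess.h>; the names __ret, __val and
 * uaddr below are illustrative, not the kernel's own):
 *
 *	int __ret;
 *	register unsigned int __val asm("edx");
 *	asm volatile("call __get_user_4"
 *		     : "=a" (__ret), "=r" (__val)
 *		     : "0" (uaddr));
 *
 * On return __ret is 0 or -EFAULT and __val holds the zero-extended
 * value; the real macro selects __get_user_1/2/4/8 from sizeof(*ptr)
 * and passes the user pointer in %[r|e]ax exactly as described above.
 */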

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

	.text
ENTRY(__get_user_1)
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* is the address below addr_limit? */
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC			/* open the user access window (SMAP) */
1:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax			/* success: return 0 */
	ASM_CLAC
	ret
ENDPROC(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

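/*
 * Roughly what __get_user_1 above does, as C-level pseudocode
 * (illustrative only: array_index_mask_nospec() is the helper that the
 * sbb/and pair open-codes, and stac()/clac() stand in for
 * ASM_STAC/ASM_CLAC):
 *
 *	if (addr >= limit)		-- limit = current's addr_limit
 *		return -EFAULT;
 *	addr &= array_index_mask_nospec(addr, limit);
 *	stac();				-- allow user access under SMAP
 *	val = *(u8 __user *)addr;	-- a fault here lands in bad_get_user
 *	clac();
 *	return 0;
 */
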
ENTRY(__get_user_2)
	add $1,%_ASM_AX			/* address of the last byte accessed */
	jc bad_get_user			/* carry set: the access would wrap */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
2:	movzwl -1(%_ASM_AX),%edx	/* -1 undoes the bias added above */
	xor %eax,%eax
	ASM_CLAC
	ret
ENDPROC(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

ENTRY(__get_user_4)
	add $3,%_ASM_AX
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
3:	movl -3(%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
ENDPROC(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

ENTRY(__get_user_8)
#ifdef CONFIG_X86_64
	add $7,%_ASM_AX
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movq -7(%_ASM_AX),%rdx
	xor %eax,%eax
	ASM_CLAC
	ret
#else
	add $7,%_ASM_AX
	jc bad_get_user_8
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl -7(%_ASM_AX),%edx
5:	movl -3(%_ASM_AX),%ecx
	xor %eax,%eax
	ASM_CLAC
	ret
#endif
ENDPROC(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
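
/*
 * Note that on 32-bit kernels the 64-bit result comes back split across
 * two registers: %edx holds bits 0-31 and %ecx bits 32-63, matching the
 * "high half in %ecx" convention documented at the top of this file.
 * The caller is expected to reassemble the pair, e.g. as
 * ((u64)ecx << 32) | edx.
 */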

bad_get_user:
	xor %edx,%edx
	mov $(-EFAULT),%_ASM_AX
	ASM_CLAC
	ret
END(bad_get_user)

#ifdef CONFIG_X86_32
bad_get_user_8:
	xor %edx,%edx
	xor %ecx,%ecx
	mov $(-EFAULT),%_ASM_AX
	ASM_CLAC
	ret
END(bad_get_user_8)
#endif

	_ASM_EXTABLE(1b,bad_get_user)
	_ASM_EXTABLE(2b,bad_get_user)
	_ASM_EXTABLE(3b,bad_get_user)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE(4b,bad_get_user)
#else
	_ASM_EXTABLE(4b,bad_get_user_8)
	_ASM_EXTABLE(5b,bad_get_user_8)
#endif
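
/*
 * The _ASM_EXTABLE() entries above tie each user-space load (local
 * labels 1: through 5:) to its fixup: if a load faults, the page fault
 * handler looks the faulting address up in the exception table and
 * resumes at bad_get_user (bad_get_user_8 for the 32-bit 8-byte case),
 * which clears the output registers and returns -EFAULT instead of
 * oopsing.
 */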