/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif
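/*
 * LONG_S_L/LONG_S_R are the partial "store left"/"store right"
 * instructions (swl/swr for 32-bit longs, sdl/sdr for 64-bit); a single
 * one of them fills an unaligned destination up to the next long
 * boundary, which is how the alignment prologue below avoids a byte loop.
 */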

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
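/*
 * On microMIPS, LONG_SP expands to a paired store (e.g. swp) that writes
 * two registers per instruction; t8/t9 both hold the fill value (see the
 * moves below), which is why STORSIZE doubles there.
 */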

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
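/*
 * The extra level of indirection lets "insn" expand as a macro argument
 * before the "e" suffix is pasted on, so e.g. EX(sb, ...) in EVA mode
 * assembles the user-memory form sbe.
 */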

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous
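/*
 * Every store emitted through EX() gets a (9b, handler) entry in
 * __ex_table, so a fault on that store branches to the given fixup label
 * instead of oopsing; the fixups below compute the number of bytes that
 * were left unset and return it in a2.
 */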

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
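/*
 * Whichever configuration is in effect, each f_fill64 expansion stores
 * exactly 64 bytes: 16, 8 or 4 stores depending on STORSIZE.
 */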

	.set	noreorder
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu	t0, a2, STORSIZE	/* very small region? */
	bnez	t0, .Lsmall_memset\@
	andi	t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move	t8, a1			/* used by 'swp' instruction */
	move	t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz	t0, 1f
	PTR_SUBU	t0, STORSIZE	/* alignment in bytes */
#else
	.set	noat
	li	AT, STORSIZE
	beqz	t0, 1f
	PTR_SUBU	t0, AT		/* alignment in bytes */
	.set	at
#endif

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0		/* long align ptr */
	PTR_ADDU	a2, t0		/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
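/*
 * MIPS R6 removed the unaligned swl/swr/sdl/sdr instructions, so the
 * destination has to be aligned with plain byte stores instead.
 */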
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz	t0, 0f;				\
	PTR_ADDU	t0, 1;

	PTR_ADDU	a2, t0		/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori	a0, STORMASK
	xori	a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
1:	ori	t1, a2, 0x3f		/* t1 = a2 & ~0x3f: bytes in full 64-byte blocks */
	xori	t1, 0x3f
	beqz	t1, .Lmemset_partial\@	/* no block to fill */
	andi	t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0		/* end address */
	.set	reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne	t1, a0, 1b
	.set	noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA	t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set	noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set	at
#endif
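	/*
	 * The jr below enters the f_fill64 expansion at "2:" minus one
	 * store instruction per STORSIZE bytes still outstanding, Duff's
	 * device style, so exactly t0 bytes of the partial block are
	 * written.
	 */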
	jr	t1
	PTR_ADDU	a0, t0		/* dest ptr */

	.set	push
	.set	noreorder
	.set	nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set	pop
	andi	a2, STORMASK		/* At most one long to go */

	beqz	a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	PTR_ADDU	a0, a2		/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	PTR_SUBU	t0, $0, a2
	move	a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:	jr	ra
	move	a2, zero

.Lsmall_memset\@:
	beqz	a2, 2f
	PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1		/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne	t1, a0, 1b
	EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr	ra			/* done */
	move	a2, zero
	.if __memset == 1
	END(memset)
	.set	__memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	jr	ra
	PTR_ADDIU	a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra
	nop
.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr + #bytes - fault_addr
	 *      a2     = t1 + (a2 & 0x3f) - $28->task->BUADDR
	 */
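	/*
	 * $28 (gp) holds the current thread_info pointer while in kernel
	 * mode; THREAD_BUADDR is the faulting address saved by the fault
	 * handler.
	 */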
	PTR_L	t0, TI_TASK($28)
	andi	a2, 0x3f
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr	ra
	LONG_SUBU	a2, t0
.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr + #bytes - fault_addr
	 *      a2     = a0 + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L	t0, TI_TASK($28)
	andi	a2, STORMASK
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	jr	ra
	LONG_SUBU	a2, t0

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra
	nop

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     = t1 - a0 + 1
	 */
	.set	reorder
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr	ra
	.set	noreorder

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

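/*
 * For orientation, here is a C-level sketch of the strategy implemented
 * below. It is illustrative only: it ignores the EVA/microMIPS/R6
 * variants and the fault-fixup machinery, and it is not part of the
 * build.
 *
 *	void *memset(void *s, int c, size_t n)
 *	{
 *		unsigned char *p = s;
 *		unsigned long fill = (unsigned char)c;
 *
 *		fill |= fill << 8;		// spread c into every byte
 *		fill |= fill << 16;
 *		if (sizeof(long) == 8)
 *			fill |= fill << (sizeof(long) * 4);
 *
 *		while (n >= sizeof(long) &&	// align to a long boundary
 *		       ((unsigned long)p & (sizeof(long) - 1))) {
 *			*p++ = c;
 *			n--;
 *		}
 *		while (n >= 64) {		// full 64-byte blocks
 *			for (size_t i = 0; i < 64; i += sizeof(long))
 *				*(unsigned long *)(p + i) = fill;
 *			p += 64;
 *			n -= 64;
 *		}
 *		while (n >= sizeof(long)) {	// partial block
 *			*(unsigned long *)p = fill;
 *			p += sizeof(long);
 *			n -= sizeof(long);
 *		}
 *		while (n--)			// trailing bytes
 *			*p++ = c;
 *		return s;
 *	}
 *
 * The assembly handles the alignment with a single swl/swr (byte stores
 * on R6) and the partial block by jumping into the middle of an unrolled
 * run of stores, but the effect is the same.
 */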
LEAF(memset)
EXPORT_SYMBOL(memset)
	beqz	a1, 1f
	move	v0, a0			/* result */

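	/*
	 * Example: for c = 0x21 ('!') the shift/or sequence below yields
	 * a1 = 0x21212121 (0x2121212121212121 when LONGSIZE == 8).
	 */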
	andi	a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or	a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or	a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif