/*
 * Copyright (c) 2018, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>

	/* Symbols exported to the rest of the image */
	.globl	smc

	.globl	zeromem16
	.globl	memcpy16

	.globl	disable_mmu
	.globl	disable_mmu_icache
18
Sandrine Bailleux3cd87d72018-10-09 11:12:55 +020019func smc
20 smc #0
21endfunc smc
22
23/* -----------------------------------------------------------------------
24 * void zeromem16(void *mem, unsigned int length);
25 *
26 * Initialise a memory region to 0.
27 * The memory address must be 16-byte aligned.
28 * -----------------------------------------------------------------------
29 */
30func zeromem16
31#if ENABLE_ASSERTIONS
32 tst x0, #0xf
33 ASM_ASSERT(eq)
34#endif
35 add x2, x0, x1
36/* zero 16 bytes at a time */
37z_loop16:
38 sub x3, x2, x0
39 cmp x3, #16
40 b.lt z_loop1
41 stp xzr, xzr, [x0], #16
42 b z_loop16
43/* zero byte per byte */
44z_loop1:
45 cmp x0, x2
46 b.eq z_end
47 strb wzr, [x0], #1
48 b z_loop1
49z_end:
50 ret
51endfunc zeromem16
52
53
54/* --------------------------------------------------------------------------
55 * void memcpy16(void *dest, const void *src, unsigned int length)
56 *
57 * Copy length bytes from memory area src to memory area dest.
58 * The memory areas should not overlap.
59 * Destination and source addresses must be 16-byte aligned.
60 * --------------------------------------------------------------------------
61 */
62func memcpy16
63#if ENABLE_ASSERTIONS
64 orr x3, x0, x1
65 tst x3, #0xf
66 ASM_ASSERT(eq)
67#endif
68/* copy 16 bytes at a time */
69m_loop16:
70 cmp x2, #16
71 b.lt m_loop1
72 ldp x3, x4, [x1], #16
73 stp x3, x4, [x0], #16
74 sub x2, x2, #16
75 b m_loop16
76/* copy byte per byte */
77m_loop1:
78 cbz x2, m_end
79 ldrb w3, [x1], #1
80 strb w3, [x0], #1
81 subs x2, x2, #1
82 b.ne m_loop1
83m_end:
84 ret
85endfunc memcpy16
86
87/* ---------------------------------------------------------------------------
88 * Disable the MMU at the current exception level (NS-EL1 or EL2)
89 * This is implemented in assembler to ensure that the data cache is cleaned
90 * and invalidated after the MMU is disabled without any intervening cacheable
91 * data accesses
92 * ---------------------------------------------------------------------------
93 */
94func disable_mmu
95 mov x1, #(SCTLR_M_BIT | SCTLR_C_BIT)
96do_disable_mmu:
97 asm_read_sctlr_el1_or_el2
98 bic x0, x0, x1
99 asm_write_sctlr_el1_or_el2 x1
100 isb /* ensure MMU is off */
101 mov x0, #DCCISW /* DCache clean and invalidate */
102 b dcsw_op_all
103endfunc disable_mmu
104
/*
 * Disable the MMU, data cache and instruction cache at the current
 * exception level: load an extended bit mask (adds SCTLR_I_BIT) and
 * reuse the common disable_mmu tail, which clears the bits and performs
 * the cache maintenance.
 */
func disable_mmu_icache
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache
109
/*
 * Need this label for asm_read/write_sctlr_el1_or_el2: those macros branch
 * here -- presumably when executing at an unexpected exception level
 * (confirm in asm_macros.S). Endless-loop trap; do not remove.
 */
dead:
	b	dead