/*
 * Copyright (c) 2018, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>

	.globl	zeromem
	.globl	memcpy4
	.globl	disable_mmu
	.globl	disable_mmu_icache

/* -----------------------------------------------------------------------
 * void zeromem(void *mem, unsigned int length);
 *
 * Initialise a memory region to 0.
 * The memory address and length must be 4-byte aligned.
 * -----------------------------------------------------------------------
 */
func zeromem
#if ENABLE_ASSERTIONS
	/* Check that the start address and the length are 4-byte aligned */
	tst	r0, #0x3
	ASM_ASSERT(eq)
	tst	r1, #0x3
	ASM_ASSERT(eq)
#endif
	add	r2, r0, r1	/* r2 = end of the region */
	mov	r1, #0
z_loop:
	cmp	r2, r0
	beq	z_end
	str	r1, [r0], #4	/* store zero and advance by one word */
	b	z_loop
z_end:
	bx	lr
endfunc zeromem
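
/* -----------------------------------------------------------------------
 * Illustrative C usage of zeromem(), a sketch only: "buf" and "clear_buf"
 * are hypothetical names, and <stdint.h> is assumed to be available. Both
 * the address and the size of a uint32_t array satisfy the 4-byte
 * alignment requirement documented above.
 *
 *	static uint32_t buf[16];
 *
 *	void clear_buf(void)
 *	{
 *		zeromem(buf, sizeof(buf));
 *	}
 * -----------------------------------------------------------------------
 */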

/* --------------------------------------------------------------------------
 * void memcpy4(void *dest, const void *src, unsigned int length);
 *
 * Copy length bytes from memory area src to memory area dest.
 * The memory areas should not overlap.
 * Destination and source addresses must be 4-byte aligned.
 * --------------------------------------------------------------------------
 */
func memcpy4
#if ENABLE_ASSERTIONS
	/* Check that both addresses are 4-byte aligned */
	orr	r3, r0, r1
	tst	r3, #0x3
	ASM_ASSERT(eq)
#endif
/* copy 4 bytes at a time */
m_loop4:
	cmp	r2, #4
	blt	m_loop1
	ldr	r3, [r1], #4
	str	r3, [r0], #4
	sub	r2, r2, #4
	b	m_loop4
/* copy the remaining bytes one by one */
m_loop1:
	cmp	r2, #0
	beq	m_end
	ldrb	r3, [r1], #1
	strb	r3, [r0], #1
	subs	r2, r2, #1
	bne	m_loop1
m_end:
	bx	lr
endfunc memcpy4
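
/* --------------------------------------------------------------------------
 * Illustrative C usage of memcpy4(), a sketch only: "src_buf", "dst_buf"
 * and "copy_buf" are hypothetical names. The two arrays are 4-byte
 * aligned and do not overlap, as required above; the length may be any
 * byte count, since the tail is copied byte by byte.
 *
 *	static uint32_t src_buf[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
 *	static uint32_t dst_buf[8];
 *
 *	void copy_buf(void)
 *	{
 *		memcpy4(dst_buf, src_buf, sizeof(src_buf));
 *	}
 * --------------------------------------------------------------------------
 */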

/* ---------------------------------------------------------------------------
 * Disable the MMU in Hyp mode.
 * These helpers operate on HSCTLR, the Hyp System Control Register, and so
 * apply to the EL2 translation regime.
 * ---------------------------------------------------------------------------
 */

func disable_mmu
	/* r1 = bits to clear: MMU enable and data cache enable */
	mov	r1, #(HSCTLR_M_BIT | HSCTLR_C_BIT)
do_disable_mmu:
	ldcopr	r0, HSCTLR
	bic	r0, r0, r1
	stcopr	r0, HSCTLR
	isb				// ensure MMU is off
	dsb	sy
	bx	lr
endfunc disable_mmu


func disable_mmu_icache
	/* Also clear the instruction cache enable bit */
	ldr	r1, =(HSCTLR_M_BIT | HSCTLR_C_BIT | HSCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache
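
/* ---------------------------------------------------------------------------
 * Illustrative C usage of disable_mmu_icache(), a sketch only: the calling
 * function and the scenario are hypothetical, and the assumed C prototype is
 * void disable_mmu_icache(void).
 *
 *	void prepare_for_image_handoff(void)
 *	{
 *		// Turn off the MMU, data cache and instruction cache in Hyp
 *		// mode before handing control to code that expects them off.
 *		disable_mmu_icache();
 *	}
 * ---------------------------------------------------------------------------
 */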