/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * Linker script for the BL2 image. It is preprocessed (hence the C macros
 * and #if conditionals) before being handed to the linker. Layout symbols
 * (__TEXT_START__, __RW_END__, ...) are consumed by the BL2 C/assembly code
 * to set up translation tables and to zero .bss at runtime.
 */

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(bl2_entrypoint)

MEMORY {
    /* The whole image must fit between BL2_BASE and BL2_LIMIT. */
    RAM (rwx): ORIGIN = BL2_BASE, LENGTH = BL2_LIMIT - BL2_BASE
}

SECTIONS {
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);
    . = BL2_BASE;

    /*
     * Page alignment of the load address is required so that distinct
     * memory attributes can be applied per section later on.
     */
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL2_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;

        /* The entry point object must come first in the image. */
#if ENABLE_RME
        *bl2_rme_entrypoint.o(.text*)
#else /* ENABLE_RME */
        *bl2_entrypoint.o(.text*)
#endif /* ENABLE_RME */

        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)
        __TEXT_END_UNALIGNED__ = .;

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        __RODATA_END_UNALIGNED__ = .;
        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    .ro . : {
        __RO_START__ = .;

        *bl2_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    __RW_START__ = .;

    /* Common writable sections, defined in common/bl_common.ld.h. */
    DATA_SECTION >RAM
    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure the rest of
         * the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL2_END__ = .;
    RAM_REGION_END = .;

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    ASSERT(. <= BL2_LIMIT, "BL2 image has exceeded its limit.")
}