/**************************************************************************//**
 * @file     cmsis_iccarm.h
 * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
 * @version  V5.1.1
 * @date     30. July 2019
 ******************************************************************************/

//------------------------------------------------------------------------------
//
// Copyright (c) 2017-2020 IAR Systems
// Copyright (c) 2017-2019 Arm Limited. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------


#ifndef __CMSIS_ICCARM_H__
#define __CMSIS_ICCARM_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

#define __IAR_FT _Pragma("inline=forced") __intrinsic

#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif


/* Define compiler macros for CPU architecture, used in CMSIS 5. */
#if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__
  /* Macros already defined */
#else
  #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #elif defined(__ARM8M_BASELINE__)
    #define __ARM_ARCH_8M_BASE__ 1
  #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M'
    #if __ARM_ARCH == 6
      #define __ARM_ARCH_6M__ 1
    #elif __ARM_ARCH == 7
      #if __ARM_FEATURE_DSP
        #define __ARM_ARCH_7EM__ 1
      #else
        #define __ARM_ARCH_7M__ 1
      #endif
    #endif /* __ARM_ARCH */
  #endif /* __ARM_ARCH_PROFILE == 'M' */
#endif

/* Alternative core deduction for older ICCARM versions */
#if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \
    !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__)
  #if defined(__ARM6M__) && (__CORE__ == __ARM6M__)
    #define __ARM_ARCH_6M__ 1
  #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__)
    #define __ARM_ARCH_7M__ 1
  #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__)
    #define __ARM_ARCH_7EM__ 1
  #elif defined(__ARM8M_BASELINE__) && (__CORE__ == __ARM8M_BASELINE__)
    #define __ARM_ARCH_8M_BASE__ 1
  #elif defined(__ARM8M_MAINLINE__) && (__CORE__ == __ARM8M_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #elif defined(__ARM8EM_MAINLINE__) && (__CORE__ == __ARM8EM_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #else
    #error "Unknown target."
  #endif
#endif



#if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1
  #define __IAR_M0_FAMILY 1
#elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1
  #define __IAR_M0_FAMILY 1
#else
  #define __IAR_M0_FAMILY 0
#endif


#ifndef __ASM
  #define __ASM __asm
#endif

#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
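
/*
 * Usage sketch (illustrative, not part of CMSIS): __COMPILER_BARRIER() only
 * prevents the compiler from reordering memory accesses across it; it emits
 * no DMB/DSB instruction. 'buffer_ready' and notify_consumer() are
 * hypothetical.
 *
 *   buffer_ready = 1U;
 *   __COMPILER_BARRIER();   // keep the flag store before the notification
 *   notify_consumer();
 */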

#ifndef __INLINE
  #define __INLINE inline
#endif

#ifndef __NO_RETURN
  #if __ICCARM_V8
    #define __NO_RETURN __attribute__((__noreturn__))
  #else
    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
  #endif
#endif

#ifndef __PACKED
  #if __ICCARM_V8
    #define __PACKED __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED __packed
  #endif
#endif

#ifndef __PACKED_STRUCT
  #if __ICCARM_V8
    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_STRUCT __packed struct
  #endif
#endif

#ifndef __PACKED_UNION
  #if __ICCARM_V8
    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_UNION __packed union
  #endif
#endif
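
/*
 * Usage sketch (illustrative, not part of CMSIS): __PACKED_STRUCT removes
 * padding so a structure matches a wire or register layout, and __ALIGNED
 * places an object on a given boundary, e.g. for DMA. The names below are
 * hypothetical.
 *
 *   __PACKED_STRUCT frame_header {
 *     uint8_t  type;
 *     uint16_t length;      // stored unaligned because of the packing
 *     uint32_t timestamp;
 *   };
 *
 *   static uint8_t dma_buffer[64] __ALIGNED(32);  // 32-byte aligned buffer
 */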

#ifndef __RESTRICT
  #if __ICCARM_V8
    #define __RESTRICT __restrict
  #else
    /* Needs IAR language extensions */
    #define __RESTRICT restrict
  #endif
#endif

#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif

#ifndef __FORCEINLINE
  #define __FORCEINLINE _Pragma("inline=forced")
#endif

#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
#endif

#ifndef __UNALIGNED_UINT16_READ
#pragma language=save
#pragma language=extended
__IAR_FT uint16_t __iar_uint16_read(void const *ptr)
{
  return *(__packed uint16_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif


#ifndef __UNALIGNED_UINT16_WRITE
#pragma language=save
#pragma language=extended
__IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
{
  *(__packed uint16_t*)(ptr) = val;
}
#pragma language=restore
#define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
#pragma language=save
#pragma language=extended
__IAR_FT uint32_t __iar_uint32_read(void const *ptr)
{
  return *(__packed uint32_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
#pragma language=save
#pragma language=extended
__IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
{
  *(__packed uint32_t*)(ptr) = val;
}
#pragma language=restore
#define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32 /* deprecated */
#pragma language=save
#pragma language=extended
__packed struct __iar_u32 { uint32_t v; };
#pragma language=restore
#define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
#endif
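
/*
 * Usage sketch (illustrative, not part of CMSIS): the __UNALIGNED_* helpers
 * read and write values at byte offsets that need not be naturally aligned,
 * e.g. inside a protocol frame. 'frame' is a hypothetical byte buffer.
 *
 *   uint8_t  frame[16];
 *   uint32_t seq = __UNALIGNED_UINT32_READ(&frame[1]);   // offset 1, unaligned
 *   __UNALIGNED_UINT16_WRITE(&frame[5], 0x1234U);        // offset 5, unaligned
 */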

#ifndef __USED
  #if __ICCARM_V8
    #define __USED __attribute__((used))
  #else
    #define __USED _Pragma("__root")
  #endif
#endif

#undef __WEAK /* undo the definition from DLib_Defaults.h */
#if __ICCARM_V8
  #define __WEAK __attribute__((weak))
#else
  #define __WEAK _Pragma("__weak")
#endif

#ifndef __PROGRAM_START
#define __PROGRAM_START __iar_program_start
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP ARM_LIB_STACK$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT ARM_LIB_STACK$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __vector_table
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE @".intvec"
#endif

#ifndef __ICCARM_INTRINSICS_VERSION__
  #define __ICCARM_INTRINSICS_VERSION__ 0
#endif

#if __ICCARM_INTRINSICS_VERSION__ == 2

  #if defined(__CLZ)
    #undef __CLZ
  #endif
  #if defined(__REVSH)
    #undef __REVSH
  #endif
  #if defined(__RBIT)
    #undef __RBIT
  #endif
  #if defined(__SSAT)
    #undef __SSAT
  #endif
  #if defined(__USAT)
    #undef __USAT
  #endif

  #include "iccarm_builtin.h"

  #define __disable_fault_irq __iar_builtin_disable_fiq
  #define __disable_irq       __iar_builtin_disable_interrupt
  #define __enable_fault_irq  __iar_builtin_enable_fiq
  #define __enable_irq        __iar_builtin_enable_interrupt
  #define __arm_rsr           __iar_builtin_rsr
  #define __arm_wsr           __iar_builtin_wsr


  #define __get_APSR()      (__arm_rsr("APSR"))
  #define __get_BASEPRI()   (__arm_rsr("BASEPRI"))
  #define __get_CONTROL()   (__arm_rsr("CONTROL"))
  #define __get_FAULTMASK() (__arm_rsr("FAULTMASK"))

  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
       (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
    #define __get_FPSCR()      (__arm_rsr("FPSCR"))
    #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE)))
  #else
    #define __get_FPSCR()      ( 0 )
    #define __set_FPSCR(VALUE) ((void)VALUE)
  #endif

  #define __get_IPSR()  (__arm_rsr("IPSR"))
  #define __get_MSP()   (__arm_rsr("MSP"))
  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure MSPLIM is RAZ/WI
    #define __get_MSPLIM() (0U)
  #else
    #define __get_MSPLIM() (__arm_rsr("MSPLIM"))
  #endif
  #define __get_PRIMASK() (__arm_rsr("PRIMASK"))
  #define __get_PSP()     (__arm_rsr("PSP"))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __get_PSPLIM() (0U)
  #else
    #define __get_PSPLIM() (__arm_rsr("PSPLIM"))
  #endif

  #define __get_xPSR() (__arm_rsr("xPSR"))

  #define __set_BASEPRI(VALUE)     (__arm_wsr("BASEPRI", (VALUE)))
  #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE)))
  #define __set_CONTROL(VALUE)     (__arm_wsr("CONTROL", (VALUE)))
  #define __set_FAULTMASK(VALUE)   (__arm_wsr("FAULTMASK", (VALUE)))
  #define __set_MSP(VALUE)         (__arm_wsr("MSP", (VALUE)))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure MSPLIM is RAZ/WI
    #define __set_MSPLIM(VALUE) ((void)(VALUE))
  #else
    #define __set_MSPLIM(VALUE) (__arm_wsr("MSPLIM", (VALUE)))
  #endif
  #define __set_PRIMASK(VALUE) (__arm_wsr("PRIMASK", (VALUE)))
  #define __set_PSP(VALUE)     (__arm_wsr("PSP", (VALUE)))
  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __set_PSPLIM(VALUE) ((void)(VALUE))
  #else
    #define __set_PSPLIM(VALUE) (__arm_wsr("PSPLIM", (VALUE)))
  #endif

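  /*
   * Usage sketch (illustrative, not part of CMSIS): a short critical section
   * that saves PRIMASK, masks interrupts, then restores the previous state so
   * it nests safely. do_protected_update() is a hypothetical function.
   *
   *   uint32_t primask = __get_PRIMASK();
   *   __disable_irq();
   *   do_protected_update();
   *   __set_PRIMASK(primask);
   */
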
  #define __TZ_get_CONTROL_NS()        (__arm_rsr("CONTROL_NS"))
  #define __TZ_set_CONTROL_NS(VALUE)   (__arm_wsr("CONTROL_NS", (VALUE)))
  #define __TZ_get_PSP_NS()            (__arm_rsr("PSP_NS"))
  #define __TZ_set_PSP_NS(VALUE)       (__arm_wsr("PSP_NS", (VALUE)))
  #define __TZ_get_MSP_NS()            (__arm_rsr("MSP_NS"))
  #define __TZ_set_MSP_NS(VALUE)       (__arm_wsr("MSP_NS", (VALUE)))
  #define __TZ_get_SP_NS()             (__arm_rsr("SP_NS"))
  #define __TZ_set_SP_NS(VALUE)        (__arm_wsr("SP_NS", (VALUE)))
  #define __TZ_get_PRIMASK_NS()        (__arm_rsr("PRIMASK_NS"))
  #define __TZ_set_PRIMASK_NS(VALUE)   (__arm_wsr("PRIMASK_NS", (VALUE)))
  #define __TZ_get_BASEPRI_NS()        (__arm_rsr("BASEPRI_NS"))
  #define __TZ_set_BASEPRI_NS(VALUE)   (__arm_wsr("BASEPRI_NS", (VALUE)))
  #define __TZ_get_FAULTMASK_NS()      (__arm_rsr("FAULTMASK_NS"))
  #define __TZ_set_FAULTMASK_NS(VALUE) (__arm_wsr("FAULTMASK_NS", (VALUE)))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __TZ_get_PSPLIM_NS()       (0U)
    #define __TZ_set_PSPLIM_NS(VALUE)  ((void)(VALUE))
  #else
    #define __TZ_get_PSPLIM_NS()       (__arm_rsr("PSPLIM_NS"))
    #define __TZ_set_PSPLIM_NS(VALUE)  (__arm_wsr("PSPLIM_NS", (VALUE)))
  #endif

  #define __TZ_get_MSPLIM_NS()         (__arm_rsr("MSPLIM_NS"))
  #define __TZ_set_MSPLIM_NS(VALUE)    (__arm_wsr("MSPLIM_NS", (VALUE)))

  #define __NOP __iar_builtin_no_operation

  #define __CLZ   __iar_builtin_CLZ
  #define __CLREX __iar_builtin_CLREX

  #define __DMB __iar_builtin_DMB
  #define __DSB __iar_builtin_DSB
  #define __ISB __iar_builtin_ISB

  #define __LDREXB __iar_builtin_LDREXB
  #define __LDREXH __iar_builtin_LDREXH
  #define __LDREXW __iar_builtin_LDREX

  #define __RBIT  __iar_builtin_RBIT
  #define __REV   __iar_builtin_REV
  #define __REV16 __iar_builtin_REV16

  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }

  #define __ROR __iar_builtin_ROR
  #define __RRX __iar_builtin_RRX

  #define __SEV __iar_builtin_SEV

  #if !__IAR_M0_FAMILY
    #define __SSAT __iar_builtin_SSAT
  #endif

  #define __STREXB __iar_builtin_STREXB
  #define __STREXH __iar_builtin_STREXH
  #define __STREXW __iar_builtin_STREX

  #if !__IAR_M0_FAMILY
    #define __USAT __iar_builtin_USAT
  #endif

  #define __WFE __iar_builtin_WFE
  #define __WFI __iar_builtin_WFI

  #if __ARM_MEDIA__
    #define __SADD8   __iar_builtin_SADD8
    #define __QADD8   __iar_builtin_QADD8
    #define __SHADD8  __iar_builtin_SHADD8
    #define __UADD8   __iar_builtin_UADD8
    #define __UQADD8  __iar_builtin_UQADD8
    #define __UHADD8  __iar_builtin_UHADD8
    #define __SSUB8   __iar_builtin_SSUB8
    #define __QSUB8   __iar_builtin_QSUB8
    #define __SHSUB8  __iar_builtin_SHSUB8
    #define __USUB8   __iar_builtin_USUB8
    #define __UQSUB8  __iar_builtin_UQSUB8
    #define __UHSUB8  __iar_builtin_UHSUB8
    #define __SADD16  __iar_builtin_SADD16
    #define __QADD16  __iar_builtin_QADD16
    #define __SHADD16 __iar_builtin_SHADD16
    #define __UADD16  __iar_builtin_UADD16
    #define __UQADD16 __iar_builtin_UQADD16
    #define __UHADD16 __iar_builtin_UHADD16
    #define __SSUB16  __iar_builtin_SSUB16
    #define __QSUB16  __iar_builtin_QSUB16
    #define __SHSUB16 __iar_builtin_SHSUB16
    #define __USUB16  __iar_builtin_USUB16
    #define __UQSUB16 __iar_builtin_UQSUB16
    #define __UHSUB16 __iar_builtin_UHSUB16
    #define __SASX    __iar_builtin_SASX
    #define __QASX    __iar_builtin_QASX
    #define __SHASX   __iar_builtin_SHASX
    #define __UASX    __iar_builtin_UASX
    #define __UQASX   __iar_builtin_UQASX
    #define __UHASX   __iar_builtin_UHASX
    #define __SSAX    __iar_builtin_SSAX
    #define __QSAX    __iar_builtin_QSAX
    #define __SHSAX   __iar_builtin_SHSAX
    #define __USAX    __iar_builtin_USAX
    #define __UQSAX   __iar_builtin_UQSAX
    #define __UHSAX   __iar_builtin_UHSAX
    #define __USAD8   __iar_builtin_USAD8
    #define __USADA8  __iar_builtin_USADA8
    #define __SSAT16  __iar_builtin_SSAT16
    #define __USAT16  __iar_builtin_USAT16
    #define __UXTB16  __iar_builtin_UXTB16
    #define __UXTAB16 __iar_builtin_UXTAB16
    #define __SXTB16  __iar_builtin_SXTB16
    #define __SXTAB16 __iar_builtin_SXTAB16
    #define __SMUAD   __iar_builtin_SMUAD
    #define __SMUADX  __iar_builtin_SMUADX
    #define __SMMLA   __iar_builtin_SMMLA
    #define __SMLAD   __iar_builtin_SMLAD
    #define __SMLADX  __iar_builtin_SMLADX
    #define __SMLALD  __iar_builtin_SMLALD
    #define __SMLALDX __iar_builtin_SMLALDX
    #define __SMUSD   __iar_builtin_SMUSD
    #define __SMUSDX  __iar_builtin_SMUSDX
    #define __SMLSD   __iar_builtin_SMLSD
    #define __SMLSDX  __iar_builtin_SMLSDX
    #define __SMLSLD  __iar_builtin_SMLSLD
    #define __SMLSLDX __iar_builtin_SMLSLDX
    #define __SEL     __iar_builtin_SEL
    #define __QADD    __iar_builtin_QADD
    #define __QSUB    __iar_builtin_QSUB
    #define __PKHBT   __iar_builtin_PKHBT
    #define __PKHTB   __iar_builtin_PKHTB
  #endif

#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if __IAR_M0_FAMILY
    /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
    #define __CLZ      __cmsis_iar_clz_not_active
    #define __SSAT     __cmsis_iar_ssat_not_active
    #define __USAT     __cmsis_iar_usat_not_active
    #define __RBIT     __cmsis_iar_rbit_not_active
    #define __get_APSR __cmsis_iar_get_APSR_not_active
  #endif


  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
    #define __set_FPSCR __cmsis_iar_set_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
    #error intrinsics.h is already included!
  #endif

  #include <intrinsics.h>

  #if __IAR_M0_FAMILY
    /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
    #undef __CLZ
    #undef __SSAT
    #undef __USAT
    #undef __RBIT
    #undef __get_APSR

    __STATIC_INLINE uint8_t __CLZ(uint32_t data)
    {
      if (data == 0U) { return 32U; }

      uint32_t count = 0U;
      uint32_t mask = 0x80000000U;

      while ((data & mask) == 0U)
      {
        count += 1U;
        mask = mask >> 1U;
      }
      return count;
    }

    __STATIC_INLINE uint32_t __RBIT(uint32_t v)
    {
      uint8_t sc = 31U;
      uint32_t r = v;
      for (v >>= 1U; v; v >>= 1U)
      {
        r <<= 1U;
        r |= v & 1U;
        sc--;
      }
      return (r << sc);
    }

    __STATIC_INLINE uint32_t __get_APSR(void)
    {
      uint32_t res;
      __asm("MRS %0,APSR" : "=r" (res));
      return res;
    }

  #endif

  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
    #undef __get_FPSCR
    #undef __set_FPSCR
    #define __get_FPSCR()      (0)
    #define __set_FPSCR(VALUE) ((void)VALUE)
  #endif

  #pragma diag_suppress=Pe940
  #pragma diag_suppress=Pe177

  #define __enable_irq  __enable_interrupt
  #define __disable_irq __disable_interrupt
  #define __NOP         __no_operation

  #define __get_xPSR __get_PSR

  #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0)

    __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
    {
      return __LDREX((unsigned long *)ptr);
    }

    __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
    {
      return __STREX(value, (unsigned long *)ptr);
    }
  #endif
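
  /*
   * Usage sketch (illustrative, not part of CMSIS): a lock-free increment
   * built on the exclusive monitor, for cores that provide LDREX/STREX. The
   * loop retries until the store-exclusive reports success (returns 0).
   *
   *   static uint32_t atomic_increment(volatile uint32_t *addr)
   *   {
   *     uint32_t val;
   *     do {
   *       val = __LDREXW(addr) + 1U;
   *     } while (__STREXW(val, addr) != 0U);
   *     return val;
   *   }
   */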


  /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
  #if (__CORTEX_M >= 0x03)

    __IAR_FT uint32_t __RRX(uint32_t value)
    {
      uint32_t result;
      __ASM volatile("RRX %0, %1" : "=r"(result) : "r" (value));
      return (result);
    }

    __IAR_FT void __set_BASEPRI_MAX(uint32_t value)
    {
      __asm volatile("MSR BASEPRI_MAX,%0"::"r" (value));
    }


    #define __enable_fault_irq  __enable_fiq
    #define __disable_fault_irq __disable_fiq


  #endif /* (__CORTEX_M >= 0x03) */

  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
  {
    return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2));
  }

  #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

    __IAR_FT uint32_t __get_MSPLIM(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
      // without main extensions, the non-secure MSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS %0,MSPLIM" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __set_MSPLIM(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
      // without main extensions, the non-secure MSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR MSPLIM,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __get_PSPLIM(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS %0,PSPLIM" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __set_PSPLIM(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR PSPLIM,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __TZ_get_CONTROL_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,CONTROL_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_CONTROL_NS(uint32_t value)
    {
      __asm volatile("MSR CONTROL_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PSP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,PSP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PSP_NS(uint32_t value)
    {
      __asm volatile("MSR PSP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_MSP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,MSP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_MSP_NS(uint32_t value)
    {
      __asm volatile("MSR MSP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_SP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,SP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_SP_NS(uint32_t value)
    {
      __asm volatile("MSR SP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PRIMASK_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,PRIMASK_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value)
    {
      __asm volatile("MSR PRIMASK_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_BASEPRI_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,BASEPRI_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value)
    {
      __asm volatile("MSR BASEPRI_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,FAULTMASK_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value)
    {
      __asm volatile("MSR FAULTMASK_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PSPLIM_NS(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS %0,PSPLIM_NS" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR PSPLIM_NS,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __TZ_get_MSPLIM_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,MSPLIM_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value)
    {
      __asm volatile("MSR MSPLIM_NS,%0" :: "r" (value));
    }

  #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */

#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */

#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value))

#if __IAR_M0_FAMILY
  __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
  {
    if ((sat >= 1U) && (sat <= 32U))
    {
      const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
      const int32_t min = -1 - max;
      if (val > max)
      {
        return max;
      }
      else if (val < min)
      {
        return min;
      }
    }
    return val;
  }

  __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
  {
    if (sat <= 31U)
    {
      const uint32_t max = ((1U << sat) - 1U);
      if (val > (int32_t)max)
      {
        return max;
      }
      else if (val < 0)
      {
        return 0U;
      }
    }
    return (uint32_t)val;
  }
#endif
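
/*
 * Usage sketch (illustrative, not part of CMSIS): saturating a wide
 * intermediate result before packing it into a 16-bit sample. 'acc' and
 * filter_step() are hypothetical.
 *
 *   int32_t  acc    = filter_step();
 *   int16_t  sample = (int16_t)__SSAT(acc, 16U);   // clamp to [-32768, 32767]
 *   uint16_t level  = (uint16_t)__USAT(acc, 16U);  // clamp to [0, 65535]
 */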

#if (__CORTEX_M >= 0x03) /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */

  __IAR_FT uint8_t __LDRBT(volatile uint8_t *addr)
  {
    uint32_t res;
    __ASM volatile ("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDRHT(volatile uint16_t *addr)
  {
    uint32_t res;
    __ASM volatile ("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDRT(volatile uint32_t *addr)
  {
    uint32_t res;
    __ASM volatile ("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return res;
  }

  __IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr)
  {
    __ASM volatile ("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
  }

  __IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr)
  {
    __ASM volatile ("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
  }

  __IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr)
  {
    __ASM volatile ("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory");
  }

#endif /* (__CORTEX_M >= 0x03) */

#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )


  __IAR_FT uint8_t __LDAB(volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDAH(volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDA(volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDA %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return res;
  }

  __IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr)
  {
    __ASM volatile ("STLB %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
  }

  __IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr)
  {
    __ASM volatile ("STLH %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
  }

  __IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr)
  {
    __ASM volatile ("STL %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
  }

  __IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEXB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEXH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEX %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEXB %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEXH %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEX %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
    return res;
  }

#endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */

#undef __IAR_FT
#undef __IAR_M0_FAMILY
#undef __ICCARM_V8

#pragma diag_default=Pe940
#pragma diag_default=Pe177

#endif /* __CMSIS_ICCARM_H__ */