cmsis_armclang.h
/**************************************************************************/
/*
 * Copyright (c) 2009-2020 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */

#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header   /* treat file as system include file */

#ifndef __ARM_COMPAT_H
#include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
#endif

/* CMSIS compiler specific defines */
#ifndef __ASM
  #define __ASM __asm
#endif
#ifndef __INLINE
  #define __INLINE __inline
#endif
#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static __inline
#endif
#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
#ifndef __NO_RETURN
  #define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
  #define __USED __attribute__((used))
#endif
#ifndef __WEAK
  #define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
  #define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
  #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
  #define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
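
/*
 * Usage sketch (illustrative, not part of the original header): the
 * __UNALIGNED_* macros read and write through packed structs so the
 * compiler emits alignment-safe accesses, e.g. when parsing a byte
 * stream. 'frame' is a hypothetical uint8_t buffer.
 *
 *   uint16_t len = __UNALIGNED_UINT16_READ(frame + 2U);
 *   __UNALIGNED_UINT32_WRITE(frame + 4U, 0xDEADBEEFU);
 */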
#ifndef __ALIGNED
  #define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
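
/*
 * Usage sketch (illustrative, not part of the original header):
 * __COMPILER_BARRIER() only stops the compiler from reordering across it
 * and emits no instruction; pair it with __DMB()/__DSB() (defined below)
 * when hardware-level ordering is required. 'buffer' and 'ready' are
 * hypothetical shared variables.
 *
 *   buffer[0] = 0x55U;
 *   __COMPILER_BARRIER();   // keep the data store before the flag store
 *   ready = 1U;
 */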

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET")))
#endif

/* ###########################  Core Function Access  ########################### */
/* intrinsic void __enable_irq(); see arm_compat.h */


/* intrinsic void __disable_irq(); see arm_compat.h */


__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
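
/*
 * Usage sketch (illustrative, not part of the original header): dropping a
 * Cortex-M thread to unprivileged execution via CONTROL.nPRIV (bit 0).
 * An ISB (defined below) makes the change take effect immediately.
 *
 *   uint32_t control = __get_CONTROL();
 *   __set_CONTROL(control | 1U);   // nPRIV = 1: unprivileged thread mode
 *   __ISB();
 */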


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
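
/*
 * Usage sketch (illustrative, not part of the original header): a nesting-
 * safe critical section that saves PRIMASK, masks all configurable-priority
 * interrupts, and restores the previous state instead of unconditionally
 * re-enabling.
 *
 *   uint32_t primask = __get_PRIMASK();
 *   __disable_irq();                 // from arm_compat.h, see above
 *   // ... critical section ...
 *   __set_PRIMASK(primask);
 */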


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )
#define __enable_fault_irq __enable_fiq /* see arm_compat.h */


#define __disable_fault_irq __disable_fiq /* see arm_compat.h */


__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
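
/*
 * Usage sketch (illustrative, not part of the original header): BASEPRI_MAX
 * performs a conditional write: BASEPRI changes only if the new value raises
 * the masking level, which suits nested critical sections that block
 * interrupts at or below a given priority. 0x40U is a hypothetical level,
 * already shifted into the implemented priority bits.
 *
 *   uint32_t basepri = __get_BASEPRI();
 *   __set_BASEPRI_MAX(0x40U);        // mask lower-urgency interrupts
 *   // ... protected region ...
 *   __set_BASEPRI(basepri);          // restore previous level
 */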


__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
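
/*
 * Usage sketch (illustrative, not part of the original header): an RTOS can
 * program PSPLIM on each context switch so a process-stack overflow faults
 * immediately instead of silently corrupting memory. 'stack' and
 * 'STACK_WORDS' are hypothetical.
 *
 *   __set_PSPLIM((uint32_t)&stack[0]);            // lowest valid address
 *   __set_PSP((uint32_t)&stack[STACK_WORDS]);     // initial top of stack
 */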


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
           (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */

#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR() ((uint32_t)0U)
#endif

#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#define __set_FPSCR __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x) ((void)(x))
#endif
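
/*
 * Usage sketch (illustrative, not part of the original header): checking and
 * clearing the cumulative FP exception flags in FPSCR (IOC..IXC in bits
 * [4:0], IDC in bit 7). Without an FPU configured, the macros above reduce
 * to harmless no-ops.
 *
 *   uint32_t fpscr = __get_FPSCR();
 *   int div_by_zero = ((fpscr & 0x2U) != 0U);   // DZC flag
 *   __set_FPSCR(fpscr & ~0x9FU);                // clear cumulative flags
 */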


/* ##########################  Core Instruction Access  ######################### */
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

#define __NOP __builtin_arm_nop

#define __WFI __builtin_arm_wfi


#define __WFE __builtin_arm_wfe


#define __SEV __builtin_arm_sev


#define __ISB() __builtin_arm_isb(0xF)

#define __DSB() __builtin_arm_dsb(0xF)


#define __DMB() __builtin_arm_dmb(0xF)


#define __REV(value) __builtin_bswap32(value)


#define __REV16(value) __ROR(__REV(value), 16)


#define __REVSH(value) (int16_t)__builtin_bswap16(value)


__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
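
/*
 * Usage sketch (illustrative, not part of the original header): __ROR also
 * underpins __REV16 above, e.g. __REV16(0x12345678U) == 0x34127856U.
 *
 *   uint32_t mask = __ROR(0x000000FFU, 8U);   // -> 0xFF000000U
 */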


#define __BKPT(value) __ASM volatile ("bkpt "#value)


#define __RBIT __builtin_arm_rbit

__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
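
/*
 * Usage sketch (illustrative, not part of the original header): deriving the
 * index of the most significant set bit, a common step in priority decoding
 * and region sizing. Note __CLZ(0) returns 32 by the contract above, so the
 * zero case must be handled by the caller.
 *
 *   uint32_t msb = 31U - __CLZ(v);   // 'v' is a hypothetical non-zero word
 */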


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

#define __LDREXB (uint8_t)__builtin_arm_ldrex


#define __LDREXH (uint16_t)__builtin_arm_ldrex


#define __LDREXW (uint32_t)__builtin_arm_ldrex


#define __STREXB (uint32_t)__builtin_arm_strex


#define __STREXH (uint32_t)__builtin_arm_strex


#define __STREXW (uint32_t)__builtin_arm_strex


#define __CLREX __builtin_arm_clrex
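
/*
 * Usage sketch (illustrative, not part of the original header): a lock-free
 * increment using the exclusive monitor. __STREXW returns 0 on success and
 * 1 if exclusivity was lost, in which case the sequence is retried.
 *
 *   uint32_t atomic_inc(volatile uint32_t *addr)   // hypothetical helper
 *   {
 *     uint32_t val;
 *     do {
 *       val = __LDREXW(addr) + 1U;
 *     } while (__STREXW(val, addr) != 0U);
 *     return val;
 *   }
 */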

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
           (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

#define __SSAT __builtin_arm_ssat


#define __USAT __builtin_arm_usat


__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);   /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
          (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
          (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */
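
/*
 * Usage sketch (illustrative, not part of the original header): saturating a
 * 32-bit accumulator into the signed 16-bit (Q15) range. On the M-profile
 * cores listed above this maps to a single SSAT instruction; elsewhere the
 * C fallback directly above is used.
 *
 *   int16_t sample = (int16_t)__SSAT(acc, 16U);   // 'acc' is a hypothetical int32_t
 */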


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
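
/*
 * Usage sketch (illustrative, not part of the original header): a one-way
 * producer/consumer handshake. The store-release orders the data store
 * before the flag store; the load-acquire orders the flag check before the
 * data load. 'data' and 'ready' are hypothetical shared uint32_t variables.
 *
 *   // producer:
 *   data = 42U;
 *   __STL(1U, &ready);                 // release: 'data' visible before 'ready'
 *
 *   // consumer:
 *   while (__LDA(&ready) == 0U) { }    // acquire: flag read before data read
 *   value = data;
 */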


#define __LDAEXB (uint8_t)__builtin_arm_ldaex


#define __LDAEXH (uint16_t)__builtin_arm_ldaex


#define __LDAEX (uint32_t)__builtin_arm_ldaex


#define __STLEXB (uint32_t)__builtin_arm_stlex


#define __STLEXH (uint32_t)__builtin_arm_stlex


#define __STLEX (uint32_t)__builtin_arm_stlex

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
           (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */
/* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

#define __SADD8 __builtin_arm_sadd8
#define __QADD8 __builtin_arm_qadd8
#define __SHADD8 __builtin_arm_shadd8
#define __UADD8 __builtin_arm_uadd8
#define __UQADD8 __builtin_arm_uqadd8
#define __UHADD8 __builtin_arm_uhadd8
#define __SSUB8 __builtin_arm_ssub8
#define __QSUB8 __builtin_arm_qsub8
#define __SHSUB8 __builtin_arm_shsub8
#define __USUB8 __builtin_arm_usub8
#define __UQSUB8 __builtin_arm_uqsub8
#define __UHSUB8 __builtin_arm_uhsub8
#define __SADD16 __builtin_arm_sadd16
#define __QADD16 __builtin_arm_qadd16
#define __SHADD16 __builtin_arm_shadd16
#define __UADD16 __builtin_arm_uadd16
#define __UQADD16 __builtin_arm_uqadd16
#define __UHADD16 __builtin_arm_uhadd16
#define __SSUB16 __builtin_arm_ssub16
#define __QSUB16 __builtin_arm_qsub16
#define __SHSUB16 __builtin_arm_shsub16
#define __USUB16 __builtin_arm_usub16
#define __UQSUB16 __builtin_arm_uqsub16
#define __UHSUB16 __builtin_arm_uhsub16
#define __SASX __builtin_arm_sasx
#define __QASX __builtin_arm_qasx
#define __SHASX __builtin_arm_shasx
#define __UASX __builtin_arm_uasx
#define __UQASX __builtin_arm_uqasx
#define __UHASX __builtin_arm_uhasx
#define __SSAX __builtin_arm_ssax
#define __QSAX __builtin_arm_qsax
#define __SHSAX __builtin_arm_shsax
#define __USAX __builtin_arm_usax
#define __UQSAX __builtin_arm_uqsax
#define __UHSAX __builtin_arm_uhsax
#define __USAD8 __builtin_arm_usad8
#define __USADA8 __builtin_arm_usada8
#define __SSAT16 __builtin_arm_ssat16
#define __USAT16 __builtin_arm_usat16
#define __UXTB16 __builtin_arm_uxtb16
#define __UXTAB16 __builtin_arm_uxtab16
#define __SXTB16 __builtin_arm_sxtb16
#define __SXTAB16 __builtin_arm_sxtab16
#define __SMUAD __builtin_arm_smuad
#define __SMUADX __builtin_arm_smuadx
#define __SMLAD __builtin_arm_smlad
#define __SMLADX __builtin_arm_smladx
#define __SMLALD __builtin_arm_smlald
#define __SMLALDX __builtin_arm_smlaldx
#define __SMUSD __builtin_arm_smusd
#define __SMUSDX __builtin_arm_smusdx
#define __SMLSD __builtin_arm_smlsd
#define __SMLSDX __builtin_arm_smlsdx
#define __SMLSLD __builtin_arm_smlsld
#define __SMLSLDX __builtin_arm_smlsldx
#define __SEL __builtin_arm_sel
#define __QADD __builtin_arm_qadd
#define __QSUB __builtin_arm_qsub

#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
                                  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
                                  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2))

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
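
/*
 * Usage sketch (illustrative, not part of the original header): the SIMD
 * intrinsics operate on packed sub-words, here two Q15 lanes per word.
 * __SMUAD multiplies both 16-bit lane pairs and sums the products, a dot-
 * product building block. x0/x1/y0/y1 are hypothetical int16_t values.
 *
 *   uint32_t a = __PKHBT(x0, x1, 16);       // pack: x1 in top, x0 in bottom
 *   uint32_t b = __PKHBT(y0, y1, 16);
 *   int32_t acc = (int32_t)__SMUAD(a, b);   // x0*y0 + x1*y1
 */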

#endif /* (__ARM_FEATURE_DSP == 1) */

#endif /* __CMSIS_ARMCLANG_H */