#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header   /* treat file as system include file */

#include <arm_compat.h>       /* Compatibility header for Arm Compiler 5 intrinsics */
#ifndef __INLINE
  #define __INLINE              __inline
#endif
#ifndef __STATIC_INLINE
  #define __STATIC_INLINE       static __inline
#endif
#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE  __attribute__((always_inline)) static __inline
#endif
#ifndef __NO_RETURN
  #define __NO_RETURN           __attribute__((__noreturn__))
#endif
#ifndef __USED
  #define __USED                __attribute__((used))
#endif
#ifndef __WEAK
  #define __WEAK                __attribute__((weak))
#endif
#ifndef __PACKED
  #define __PACKED              __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
  #define __PACKED_STRUCT       struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
  #define __PACKED_UNION        union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32      /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)  (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)  (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)  (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)  (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
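/* Illustrative usage sketch (not part of the original header): the packed
   struct casts make the compiler emit unaligned-safe accesses instead of
   assuming natural alignment. 'frame' is a hypothetical example buffer:

     uint8_t frame[16];
     uint32_t seq = __UNALIGNED_UINT32_READ(&frame[3]);
     __UNALIGNED_UINT16_WRITE(&frame[1], 0xABCDU);
*/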
#ifndef __ALIGNED
  #define __ALIGNED(x)           __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
  #define __RESTRICT             __restrict
#endif
#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER()   __ASM volatile("":::"memory")
#endif
/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START           __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif
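/* Illustrative usage sketch (assumption: a typical CMSIS device startup file,
   not part of this header). Startup code usually places the vector table via
   these macros, e.g.:

     extern void Reset_Handler(void);
     extern uint32_t __INITIAL_SP;    // resolves to the linker stack-top symbol
     void (* const __VECTOR_TABLE[])(void) __VECTOR_TABLE_ATTRIBUTE = {
       (void (*)(void))&__INITIAL_SP, // initial stack pointer value
       Reset_Handler,                 // reset entry point
       // ... further exception handlers
     };
*/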
/* ###########################  Core Function Access  ########################### */

/* Get Control Register. */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Control Register (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Control Register. */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Control Register (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
/* Get IPSR (Interrupt Program Status Register). */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}

/* Get APSR (Application Program Status Register). */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}

/* Get xPSR (combined Program Status Register). */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}

/* Get Process Stack Pointer. */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Process Stack Pointer. */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
/* Get Main Stack Pointer. */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Main Stack Pointer. */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Stack Pointer (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}

/* Set Stack Pointer (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/* Get Priority Mask. */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Priority Mask (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Priority Mask. */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Priority Mask (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/* Enable fault interrupts: clears FAULTMASK (see __enable_fiq in arm_compat.h). */
#define __enable_fault_irq        __enable_fiq

/* Disable fault interrupts: sets FAULTMASK (see __disable_fiq in arm_compat.h). */
#define __disable_fault_irq       __disable_fiq
/* Get Base Priority. */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Base Priority (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Base Priority. */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Base Priority (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/* Set Base Priority with condition: only raises the masking level
   (write is ignored if BASEPRI is already set to a higher-urgency value). */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
/* Get Fault Mask. */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Fault Mask (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Fault Mask. */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Fault Mask (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* Armv7-M / Armv7E-M / Armv8-M Mainline */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* Get Process Stack Pointer Limit. */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer Limit (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/* Set Process Stack Pointer Limit. */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer Limit (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/* Get Main Stack Pointer Limit. */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer Limit (non-secure), when in secure state. */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/* Set Main Stack Pointer Limit. */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer Limit (non-secure), when in secure state. */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* Armv8-M Mainline / Baseline */
/* Get FPSCR (Floating Point Status/Control register). */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()    ((uint32_t)0U)
#endif
/* Set FPSCR (Floating Point Status/Control register). */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)   ((void)(x))
#endif
/* Register constraints for inline assembly: Thumb-1 only targets must use low registers. */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r)  "=l" (r)
#define __CMSIS_GCC_USE_REG(r)  "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r)  "=r" (r)
#define __CMSIS_GCC_USE_REG(r)  "r" (r)
#endif
/* ##########################  Core Instruction Access  ######################### */

#define __NOP            __builtin_arm_nop          /* No Operation */
#define __WFI            __builtin_arm_wfi          /* Wait For Interrupt */
#define __WFE            __builtin_arm_wfe          /* Wait For Event */
#define __SEV            __builtin_arm_sev          /* Send Event */
#define __ISB()          __builtin_arm_isb(0xF)     /* Instruction Synchronization Barrier */
#define __DSB()          __builtin_arm_dsb(0xF)     /* Data Synchronization Barrier */
#define __DMB()          __builtin_arm_dmb(0xF)     /* Data Memory Barrier */
#define __REV(value)     __builtin_bswap32(value)   /* Reverse byte order (32 bit) */
#define __REV16(value)   __ROR(__REV(value), 16)    /* Reverse byte order within each halfword */
#define __REVSH(value)   (int16_t)__builtin_bswap16(value)  /* Reverse byte order (signed 16 bit) */
/* Rotate Right in unsigned value (32 bit). */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U) {
    return op1;   /* avoid undefined behaviour of a 32-bit shift */
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
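/* Example (follows from the definition above): __ROR(0x12345678U, 8U) yields
   0x78123456U, and a rotate count of 0 or any multiple of 32 returns the
   operand unchanged. */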
/* Breakpoint: cause the processor to enter Debug state. */
#define __BKPT(value)     __ASM volatile ("bkpt "#value)

/* Reverse the bit order of a 32-bit value. */
#define __RBIT            __builtin_arm_rbit
/* Count leading zeros. */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* __builtin_clz(0) is formally undefined behaviour, so handle that case
     explicitly; Arm Compiler optimises the test away, leaving a single CLZ. */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
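/* Examples (follow from the definition above): __CLZ(0U) == 32U,
   __CLZ(1U) == 31U, __CLZ(0x80000000U) == 0U. */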
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

#define __LDREXB  (uint8_t)__builtin_arm_ldrex    /* LDR Exclusive (8 bit) */
#define __LDREXH  (uint16_t)__builtin_arm_ldrex   /* LDR Exclusive (16 bit) */
#define __LDREXW  (uint32_t)__builtin_arm_ldrex   /* LDR Exclusive (32 bit) */
#define __STREXB  (uint32_t)__builtin_arm_strex   /* STR Exclusive (8 bit); 0 on success, 1 on failure */
#define __STREXH  (uint32_t)__builtin_arm_strex   /* STR Exclusive (16 bit) */
#define __STREXW  (uint32_t)__builtin_arm_strex   /* STR Exclusive (32 bit) */
#define __CLREX   __builtin_arm_clrex             /* Remove the exclusive lock */
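/* Illustrative usage sketch (not part of the original header): a lock-free
   increment using the exclusive monitor; the store returns nonzero if
   another agent touched the location between the LDREX and the STREX:

     static uint32_t counter;
     uint32_t v;
     do {
       v = __LDREXW(&counter);
     } while (__STREXW(v + 1U, &counter) != 0U);
*/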
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

#define __SSAT    __builtin_arm_ssat   /* Signed Saturate */
#define __USAT    __builtin_arm_usat   /* Unsigned Saturate */
/* Rotate Right with Extend (32 bit). */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
/* LDRT Unprivileged (8 bit). */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
/* LDRT Unprivileged (16 bit). */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
/* LDRT Unprivileged (32 bit). */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
/* STRT Unprivileged (8 bit). */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* STRT Unprivileged (16 bit). */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* STRT Unprivileged (32 bit). */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#else  /* Armv6-M: no hardware saturation instructions */

/* Signed Saturate to sat bits (1..32). */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max) { return max; }
    else if (val < min) { return min; }
  }
  return val;
}
/* Unsigned Saturate to sat bits (0..31). */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max) { return max; }
    else if (val < 0) { return 0U; }
  }
  return (uint32_t)val;
}

#endif /* Armv7-M / Armv7E-M / Armv8-M Mainline */
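/* Examples (follow from the definitions above): __SSAT(200, 8U) == 127 and
   __SSAT(-200, 8U) == -128 (clamped to the signed 8-bit range), while
   __USAT(-5, 8U) == 0U and __USAT(300, 8U) == 255U. */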
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* Load-Acquire (8 bit). */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}
/* Load-Acquire (16 bit). */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}
/* Load-Acquire (32 bit). */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}
/* Store-Release (8 bit). */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/* Store-Release (16 bit). */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/* Store-Release (32 bit). */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
#define __LDAEXB  (uint8_t)__builtin_arm_ldaex    /* Load-Acquire Exclusive (8 bit) */
#define __LDAEXH  (uint16_t)__builtin_arm_ldaex   /* Load-Acquire Exclusive (16 bit) */
#define __LDAEX   (uint32_t)__builtin_arm_ldaex   /* Load-Acquire Exclusive (32 bit) */
#define __STLEXB  (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (8 bit); 0 on success */
#define __STLEXH  (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (16 bit) */
#define __STLEX   (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (32 bit) */

#endif /* Armv8-M Mainline / Baseline */
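/* Illustrative usage sketch (not part of the original header): a minimal
   spinlock built on the acquire/release exclusives, so no separate barrier
   is needed around the protected region:

     static uint32_t lock;   // 0 = free, 1 = taken
     void lock_acquire(void)
     {
       uint32_t st;
       do {
         while (__LDAEX(&lock) != 0U) { }   // wait until free (acquire semantics)
         st = __STLEX(1U, &lock);           // try to claim (release semantics)
       } while (st != 0U);                  // retry if the exclusive store failed
     }
     void lock_release(void)
     {
       __STL(0U, &lock);                    // store-release clears the lock
     }
*/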
/* ###################  Compiler specific Intrinsics  ########################### */

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
/* SIMD add/subtract on four 8-bit lanes: plain (s/u), saturating (q/uq),
   and halving (sh/uh) variants. */

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* SIMD add/subtract on two 16-bit lanes: plain, saturating, and halving variants. */

__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* SIMD exchange add/subtract: add one halfword lane while subtracting the
   other (ASX), or the reverse (SAX), with saturating and halving variants. */

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Unsigned Sum of Absolute Differences of the four byte lanes. */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned Sum of Absolute Differences, with accumulate. */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Signed/Unsigned Saturate of both 16-bit lanes; ARG2 must be a constant. */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
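/* Example (illustrative): __SSAT16(0x7FFF8000, 8) saturates both halfword
   lanes to the signed 8-bit range, giving 0x007FFF80 (lanes 0x007F and
   0xFF80, i.e. +127 and -128). */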
/* Extend bytes 0 and 2 to halfwords, optionally with accumulate
   (unsigned: UXTB16/UXTAB16, signed: SXTB16/SXTAB16). */

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Dual 16-bit signed multiply with addition of the products, optionally with
   accumulate; the X variants swap the halfwords of op2 first. */

__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Signed Multiply Accumulate Long Dual. */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Signed Multiply Accumulate Long Dual, halfwords of op2 exchanged. */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Dual 16-bit signed multiply with subtraction of the products, optionally
   with accumulate; the X variants swap the halfwords of op2 first. */

__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Signed Multiply Subtract Long Dual. */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Signed Multiply Subtract Long Dual, halfwords of op2 exchanged. */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Select bytes from op1 or op2 according to the APSR.GE flags. */
__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Saturating Add. */
__STATIC_FORCEINLINE int32_t __QADD(int32_t op1, int32_t op2)
{
  int32_t result;
  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Saturating Subtract. */
__STATIC_FORCEINLINE int32_t __QSUB(int32_t op1, int32_t op2)
{
  int32_t result;
  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Pack Halfword: bottom-top (PKHBT) and top-bottom (PKHTB). */
#define __PKHBT(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
                                   ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
                                   ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
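/* Examples (follow from the definitions above):
   __PKHBT(0x00001111U, 0x22220000U, 0U) == 0x22221111U  (bottom of ARG1, top of ARG2)
   __PKHTB(0x11110000U, 0x00002222U, 0U) == 0x11112222U  (top of ARG1, bottom of ARG2) */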
/* SXTB16 applied to a rotated source operand. */
#define __SXTB16_RORn(ARG1, ARG2)  __SXTB16(__ROR(ARG1, ARG2))

/* Signed Most Significant Word Multiply Accumulate. */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */

#endif /* __CMSIS_ARMCLANG_H */