diff options
Diffstat (limited to 'lib/chibios-contrib/ext/mcux-sdk/CMSIS/Include/cmsis_iccarm.h')
-rw-r--r-- | lib/chibios-contrib/ext/mcux-sdk/CMSIS/Include/cmsis_iccarm.h | 964 |
1 files changed, 964 insertions, 0 deletions
diff --git a/lib/chibios-contrib/ext/mcux-sdk/CMSIS/Include/cmsis_iccarm.h b/lib/chibios-contrib/ext/mcux-sdk/CMSIS/Include/cmsis_iccarm.h new file mode 100644 index 000000000..12d68fd9a --- /dev/null +++ b/lib/chibios-contrib/ext/mcux-sdk/CMSIS/Include/cmsis_iccarm.h | |||
@@ -0,0 +1,964 @@ | |||
1 | /**************************************************************************//** | ||
2 | * @file cmsis_iccarm.h | ||
3 | * @brief CMSIS compiler ICCARM (IAR Compiler for Arm) header file | ||
4 | * @version V5.1.0 | ||
5 | * @date 08. May 2019 | ||
6 | ******************************************************************************/ | ||
7 | |||
8 | //------------------------------------------------------------------------------ | ||
9 | // | ||
10 | // Copyright (c) 2017-2019 IAR Systems | ||
11 | // Copyright (c) 2017-2019 Arm Limited. All rights reserved. | ||
12 | // | ||
13 | // Licensed under the Apache License, Version 2.0 (the "License") | ||
14 | // you may not use this file except in compliance with the License. | ||
15 | // You may obtain a copy of the License at | ||
16 | // http://www.apache.org/licenses/LICENSE-2.0 | ||
17 | // | ||
18 | // Unless required by applicable law or agreed to in writing, software | ||
19 | // distributed under the License is distributed on an "AS IS" BASIS, | ||
20 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
21 | // See the License for the specific language governing permissions and | ||
22 | // limitations under the License. | ||
23 | // | ||
24 | //------------------------------------------------------------------------------ | ||
25 | |||
26 | |||
27 | #ifndef __CMSIS_ICCARM_H__ | ||
28 | #define __CMSIS_ICCARM_H__ | ||
29 | |||
30 | #ifndef __ICCARM__ | ||
31 | #error This file should only be compiled by ICCARM | ||
32 | #endif | ||
33 | |||
34 | #pragma system_include | ||
35 | |||
36 | #define __IAR_FT _Pragma("inline=forced") __intrinsic | ||
37 | |||
38 | #if (__VER__ >= 8000000) | ||
39 | #define __ICCARM_V8 1 | ||
40 | #else | ||
41 | #define __ICCARM_V8 0 | ||
42 | #endif | ||
43 | |||
44 | #ifndef __ALIGNED | ||
45 | #if __ICCARM_V8 | ||
46 | #define __ALIGNED(x) __attribute__((aligned(x))) | ||
47 | #elif (__VER__ >= 7080000) | ||
48 | /* Needs IAR language extensions */ | ||
49 | #define __ALIGNED(x) __attribute__((aligned(x))) | ||
50 | #else | ||
51 | #warning No compiler specific solution for __ALIGNED.__ALIGNED is ignored. | ||
52 | #define __ALIGNED(x) | ||
53 | #endif | ||
54 | #endif | ||
55 | |||
56 | |||
57 | /* Define compiler macros for CPU architecture, used in CMSIS 5. | ||
58 | */ | ||
59 | #if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__ | ||
60 | /* Macros already defined */ | ||
61 | #else | ||
62 | #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__) | ||
63 | #define __ARM_ARCH_8M_MAIN__ 1 | ||
64 | #elif defined(__ARM8M_BASELINE__) | ||
65 | #define __ARM_ARCH_8M_BASE__ 1 | ||
66 | #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M' | ||
67 | #if __ARM_ARCH == 6 | ||
68 | #define __ARM_ARCH_6M__ 1 | ||
69 | #elif __ARM_ARCH == 7 | ||
70 | #if __ARM_FEATURE_DSP | ||
71 | #define __ARM_ARCH_7EM__ 1 | ||
72 | #else | ||
73 | #define __ARM_ARCH_7M__ 1 | ||
74 | #endif | ||
75 | #endif /* __ARM_ARCH */ | ||
76 | #endif /* __ARM_ARCH_PROFILE == 'M' */ | ||
77 | #endif | ||
78 | |||
79 | /* Alternativ core deduction for older ICCARM's */ | ||
80 | #if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \ | ||
81 | !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__) | ||
82 | #if defined(__ARM6M__) && (__CORE__ == __ARM6M__) | ||
83 | #define __ARM_ARCH_6M__ 1 | ||
84 | #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__) | ||
85 | #define __ARM_ARCH_7M__ 1 | ||
86 | #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__) | ||
87 | #define __ARM_ARCH_7EM__ 1 | ||
88 | #elif defined(__ARM8M_BASELINE__) && (__CORE == __ARM8M_BASELINE__) | ||
89 | #define __ARM_ARCH_8M_BASE__ 1 | ||
90 | #elif defined(__ARM8M_MAINLINE__) && (__CORE == __ARM8M_MAINLINE__) | ||
91 | #define __ARM_ARCH_8M_MAIN__ 1 | ||
92 | #elif defined(__ARM8EM_MAINLINE__) && (__CORE == __ARM8EM_MAINLINE__) | ||
93 | #define __ARM_ARCH_8M_MAIN__ 1 | ||
94 | #else | ||
95 | #error "Unknown target." | ||
96 | #endif | ||
97 | #endif | ||
98 | |||
99 | |||
100 | |||
101 | #if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1 | ||
102 | #define __IAR_M0_FAMILY 1 | ||
103 | #elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1 | ||
104 | #define __IAR_M0_FAMILY 1 | ||
105 | #else | ||
106 | #define __IAR_M0_FAMILY 0 | ||
107 | #endif | ||
108 | |||
109 | |||
110 | #ifndef __ASM | ||
111 | #define __ASM __asm | ||
112 | #endif | ||
113 | |||
114 | #ifndef __COMPILER_BARRIER | ||
115 | #define __COMPILER_BARRIER() __ASM volatile("":::"memory") | ||
116 | #endif | ||
117 | |||
118 | #ifndef __INLINE | ||
119 | #define __INLINE inline | ||
120 | #endif | ||
121 | |||
122 | #ifndef __NO_RETURN | ||
123 | #if __ICCARM_V8 | ||
124 | #define __NO_RETURN __attribute__((__noreturn__)) | ||
125 | #else | ||
126 | #define __NO_RETURN _Pragma("object_attribute=__noreturn") | ||
127 | #endif | ||
128 | #endif | ||
129 | |||
130 | #ifndef __PACKED | ||
131 | #if __ICCARM_V8 | ||
132 | #define __PACKED __attribute__((packed, aligned(1))) | ||
133 | #else | ||
134 | /* Needs IAR language extensions */ | ||
135 | #define __PACKED __packed | ||
136 | #endif | ||
137 | #endif | ||
138 | |||
139 | #ifndef __PACKED_STRUCT | ||
140 | #if __ICCARM_V8 | ||
141 | #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) | ||
142 | #else | ||
143 | /* Needs IAR language extensions */ | ||
144 | #define __PACKED_STRUCT __packed struct | ||
145 | #endif | ||
146 | #endif | ||
147 | |||
148 | #ifndef __PACKED_UNION | ||
149 | #if __ICCARM_V8 | ||
150 | #define __PACKED_UNION union __attribute__((packed, aligned(1))) | ||
151 | #else | ||
152 | /* Needs IAR language extensions */ | ||
153 | #define __PACKED_UNION __packed union | ||
154 | #endif | ||
155 | #endif | ||
156 | |||
157 | #ifndef __RESTRICT | ||
158 | #if __ICCARM_V8 | ||
159 | #define __RESTRICT __restrict | ||
160 | #else | ||
161 | /* Needs IAR language extensions */ | ||
162 | #define __RESTRICT restrict | ||
163 | #endif | ||
164 | #endif | ||
165 | |||
166 | #ifndef __STATIC_INLINE | ||
167 | #define __STATIC_INLINE static inline | ||
168 | #endif | ||
169 | |||
170 | #ifndef __FORCEINLINE | ||
171 | #define __FORCEINLINE _Pragma("inline=forced") | ||
172 | #endif | ||
173 | |||
174 | #ifndef __STATIC_FORCEINLINE | ||
175 | #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE | ||
176 | #endif | ||
177 | |||
178 | #ifndef __UNALIGNED_UINT16_READ | ||
179 | #pragma language=save | ||
180 | #pragma language=extended | ||
181 | __IAR_FT uint16_t __iar_uint16_read(void const *ptr) | ||
182 | { | ||
183 | return *(__packed uint16_t*)(ptr); | ||
184 | } | ||
185 | #pragma language=restore | ||
186 | #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR) | ||
187 | #endif | ||
188 | |||
189 | |||
190 | #ifndef __UNALIGNED_UINT16_WRITE | ||
191 | #pragma language=save | ||
192 | #pragma language=extended | ||
193 | __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val) | ||
194 | { | ||
195 | *(__packed uint16_t*)(ptr) = val;; | ||
196 | } | ||
197 | #pragma language=restore | ||
198 | #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL) | ||
199 | #endif | ||
200 | |||
201 | #ifndef __UNALIGNED_UINT32_READ | ||
202 | #pragma language=save | ||
203 | #pragma language=extended | ||
204 | __IAR_FT uint32_t __iar_uint32_read(void const *ptr) | ||
205 | { | ||
206 | return *(__packed uint32_t*)(ptr); | ||
207 | } | ||
208 | #pragma language=restore | ||
209 | #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR) | ||
210 | #endif | ||
211 | |||
212 | #ifndef __UNALIGNED_UINT32_WRITE | ||
213 | #pragma language=save | ||
214 | #pragma language=extended | ||
215 | __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val) | ||
216 | { | ||
217 | *(__packed uint32_t*)(ptr) = val;; | ||
218 | } | ||
219 | #pragma language=restore | ||
220 | #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL) | ||
221 | #endif | ||
222 | |||
223 | #ifndef __UNALIGNED_UINT32 /* deprecated */ | ||
224 | #pragma language=save | ||
225 | #pragma language=extended | ||
226 | __packed struct __iar_u32 { uint32_t v; }; | ||
227 | #pragma language=restore | ||
228 | #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v) | ||
229 | #endif | ||
230 | |||
231 | #ifndef __USED | ||
232 | #if __ICCARM_V8 | ||
233 | #define __USED __attribute__((used)) | ||
234 | #else | ||
235 | #define __USED _Pragma("__root") | ||
236 | #endif | ||
237 | #endif | ||
238 | |||
239 | #ifndef __WEAK | ||
240 | #if __ICCARM_V8 | ||
241 | #define __WEAK __attribute__((weak)) | ||
242 | #else | ||
243 | #define __WEAK _Pragma("__weak") | ||
244 | #endif | ||
245 | #endif | ||
246 | |||
247 | #ifndef __PROGRAM_START | ||
248 | #define __PROGRAM_START __iar_program_start | ||
249 | #endif | ||
250 | |||
251 | #ifndef __INITIAL_SP | ||
252 | #define __INITIAL_SP CSTACK$$Limit | ||
253 | #endif | ||
254 | |||
255 | #ifndef __STACK_LIMIT | ||
256 | #define __STACK_LIMIT CSTACK$$Base | ||
257 | #endif | ||
258 | |||
259 | #ifndef __VECTOR_TABLE | ||
260 | #define __VECTOR_TABLE __vector_table | ||
261 | #endif | ||
262 | |||
263 | #ifndef __VECTOR_TABLE_ATTRIBUTE | ||
264 | #define __VECTOR_TABLE_ATTRIBUTE @".intvec" | ||
265 | #endif | ||
266 | |||
267 | #ifndef __ICCARM_INTRINSICS_VERSION__ | ||
268 | #define __ICCARM_INTRINSICS_VERSION__ 0 | ||
269 | #endif | ||
270 | |||
271 | #if __ICCARM_INTRINSICS_VERSION__ == 2 | ||
272 | |||
273 | #if defined(__CLZ) | ||
274 | #undef __CLZ | ||
275 | #endif | ||
276 | #if defined(__REVSH) | ||
277 | #undef __REVSH | ||
278 | #endif | ||
279 | #if defined(__RBIT) | ||
280 | #undef __RBIT | ||
281 | #endif | ||
282 | #if defined(__SSAT) | ||
283 | #undef __SSAT | ||
284 | #endif | ||
285 | #if defined(__USAT) | ||
286 | #undef __USAT | ||
287 | #endif | ||
288 | |||
289 | #include "iccarm_builtin.h" | ||
290 | |||
291 | #define __disable_fault_irq __iar_builtin_disable_fiq | ||
292 | #define __disable_irq __iar_builtin_disable_interrupt | ||
293 | #define __enable_fault_irq __iar_builtin_enable_fiq | ||
294 | #define __enable_irq __iar_builtin_enable_interrupt | ||
295 | #define __arm_rsr __iar_builtin_rsr | ||
296 | #define __arm_wsr __iar_builtin_wsr | ||
297 | |||
298 | |||
299 | #define __get_APSR() (__arm_rsr("APSR")) | ||
300 | #define __get_BASEPRI() (__arm_rsr("BASEPRI")) | ||
301 | #define __get_CONTROL() (__arm_rsr("CONTROL")) | ||
302 | #define __get_FAULTMASK() (__arm_rsr("FAULTMASK")) | ||
303 | |||
304 | #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ | ||
305 | (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) | ||
306 | #define __get_FPSCR() (__arm_rsr("FPSCR")) | ||
307 | #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE))) | ||
308 | #else | ||
309 | #define __get_FPSCR() ( 0 ) | ||
310 | #define __set_FPSCR(VALUE) ((void)VALUE) | ||
311 | #endif | ||
312 | |||
313 | #define __get_IPSR() (__arm_rsr("IPSR")) | ||
314 | #define __get_MSP() (__arm_rsr("MSP")) | ||
315 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
316 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) | ||
317 | // without main extensions, the non-secure MSPLIM is RAZ/WI | ||
318 | #define __get_MSPLIM() (0U) | ||
319 | #else | ||
320 | #define __get_MSPLIM() (__arm_rsr("MSPLIM")) | ||
321 | #endif | ||
322 | #define __get_PRIMASK() (__arm_rsr("PRIMASK")) | ||
323 | #define __get_PSP() (__arm_rsr("PSP")) | ||
324 | |||
325 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
326 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) | ||
327 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
328 | #define __get_PSPLIM() (0U) | ||
329 | #else | ||
330 | #define __get_PSPLIM() (__arm_rsr("PSPLIM")) | ||
331 | #endif | ||
332 | |||
333 | #define __get_xPSR() (__arm_rsr("xPSR")) | ||
334 | |||
335 | #define __set_BASEPRI(VALUE) (__arm_wsr("BASEPRI", (VALUE))) | ||
336 | #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE))) | ||
337 | #define __set_CONTROL(VALUE) (__arm_wsr("CONTROL", (VALUE))) | ||
338 | #define __set_FAULTMASK(VALUE) (__arm_wsr("FAULTMASK", (VALUE))) | ||
339 | #define __set_MSP(VALUE) (__arm_wsr("MSP", (VALUE))) | ||
340 | |||
341 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
342 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) | ||
343 | // without main extensions, the non-secure MSPLIM is RAZ/WI | ||
344 | #define __set_MSPLIM(VALUE) ((void)(VALUE)) | ||
345 | #else | ||
346 | #define __set_MSPLIM(VALUE) (__arm_wsr("MSPLIM", (VALUE))) | ||
347 | #endif | ||
348 | #define __set_PRIMASK(VALUE) (__arm_wsr("PRIMASK", (VALUE))) | ||
349 | #define __set_PSP(VALUE) (__arm_wsr("PSP", (VALUE))) | ||
350 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
351 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) | ||
352 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
353 | #define __set_PSPLIM(VALUE) ((void)(VALUE)) | ||
354 | #else | ||
355 | #define __set_PSPLIM(VALUE) (__arm_wsr("PSPLIM", (VALUE))) | ||
356 | #endif | ||
357 | |||
358 | #define __TZ_get_CONTROL_NS() (__arm_rsr("CONTROL_NS")) | ||
359 | #define __TZ_set_CONTROL_NS(VALUE) (__arm_wsr("CONTROL_NS", (VALUE))) | ||
360 | #define __TZ_get_PSP_NS() (__arm_rsr("PSP_NS")) | ||
361 | #define __TZ_set_PSP_NS(VALUE) (__arm_wsr("PSP_NS", (VALUE))) | ||
362 | #define __TZ_get_MSP_NS() (__arm_rsr("MSP_NS")) | ||
363 | #define __TZ_set_MSP_NS(VALUE) (__arm_wsr("MSP_NS", (VALUE))) | ||
364 | #define __TZ_get_SP_NS() (__arm_rsr("SP_NS")) | ||
365 | #define __TZ_set_SP_NS(VALUE) (__arm_wsr("SP_NS", (VALUE))) | ||
366 | #define __TZ_get_PRIMASK_NS() (__arm_rsr("PRIMASK_NS")) | ||
367 | #define __TZ_set_PRIMASK_NS(VALUE) (__arm_wsr("PRIMASK_NS", (VALUE))) | ||
368 | #define __TZ_get_BASEPRI_NS() (__arm_rsr("BASEPRI_NS")) | ||
369 | #define __TZ_set_BASEPRI_NS(VALUE) (__arm_wsr("BASEPRI_NS", (VALUE))) | ||
370 | #define __TZ_get_FAULTMASK_NS() (__arm_rsr("FAULTMASK_NS")) | ||
371 | #define __TZ_set_FAULTMASK_NS(VALUE)(__arm_wsr("FAULTMASK_NS", (VALUE))) | ||
372 | |||
373 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
374 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) | ||
375 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
376 | #define __TZ_get_PSPLIM_NS() (0U) | ||
377 | #define __TZ_set_PSPLIM_NS(VALUE) ((void)(VALUE)) | ||
378 | #else | ||
379 | #define __TZ_get_PSPLIM_NS() (__arm_rsr("PSPLIM_NS")) | ||
380 | #define __TZ_set_PSPLIM_NS(VALUE) (__arm_wsr("PSPLIM_NS", (VALUE))) | ||
381 | #endif | ||
382 | |||
383 | #define __TZ_get_MSPLIM_NS() (__arm_rsr("MSPLIM_NS")) | ||
384 | #define __TZ_set_MSPLIM_NS(VALUE) (__arm_wsr("MSPLIM_NS", (VALUE))) | ||
385 | |||
386 | #define __NOP __iar_builtin_no_operation | ||
387 | |||
388 | #define __CLZ __iar_builtin_CLZ | ||
389 | #define __CLREX __iar_builtin_CLREX | ||
390 | |||
391 | #define __DMB __iar_builtin_DMB | ||
392 | #define __DSB __iar_builtin_DSB | ||
393 | #define __ISB __iar_builtin_ISB | ||
394 | |||
395 | #define __LDREXB __iar_builtin_LDREXB | ||
396 | #define __LDREXH __iar_builtin_LDREXH | ||
397 | #define __LDREXW __iar_builtin_LDREX | ||
398 | |||
399 | #define __RBIT __iar_builtin_RBIT | ||
400 | #define __REV __iar_builtin_REV | ||
401 | #define __REV16 __iar_builtin_REV16 | ||
402 | |||
/* Byte-swap the two bytes of a 16-bit value and sign-extend (REVSH).
   Delegates to the IAR builtin and narrows its result back to int16_t. */
403 | __IAR_FT int16_t __REVSH(int16_t val) | ||
404 | { | ||
405 | return (int16_t) __iar_builtin_REVSH(val); | ||
406 | } | ||
407 | |||
408 | #define __ROR __iar_builtin_ROR | ||
409 | #define __RRX __iar_builtin_RRX | ||
410 | |||
411 | #define __SEV __iar_builtin_SEV | ||
412 | |||
413 | #if !__IAR_M0_FAMILY | ||
414 | #define __SSAT __iar_builtin_SSAT | ||
415 | #endif | ||
416 | |||
417 | #define __STREXB __iar_builtin_STREXB | ||
418 | #define __STREXH __iar_builtin_STREXH | ||
419 | #define __STREXW __iar_builtin_STREX | ||
420 | |||
421 | #if !__IAR_M0_FAMILY | ||
422 | #define __USAT __iar_builtin_USAT | ||
423 | #endif | ||
424 | |||
425 | #define __WFE __iar_builtin_WFE | ||
426 | #define __WFI __iar_builtin_WFI | ||
427 | |||
428 | #if __ARM_MEDIA__ | ||
429 | #define __SADD8 __iar_builtin_SADD8 | ||
430 | #define __QADD8 __iar_builtin_QADD8 | ||
431 | #define __SHADD8 __iar_builtin_SHADD8 | ||
432 | #define __UADD8 __iar_builtin_UADD8 | ||
433 | #define __UQADD8 __iar_builtin_UQADD8 | ||
434 | #define __UHADD8 __iar_builtin_UHADD8 | ||
435 | #define __SSUB8 __iar_builtin_SSUB8 | ||
436 | #define __QSUB8 __iar_builtin_QSUB8 | ||
437 | #define __SHSUB8 __iar_builtin_SHSUB8 | ||
438 | #define __USUB8 __iar_builtin_USUB8 | ||
439 | #define __UQSUB8 __iar_builtin_UQSUB8 | ||
440 | #define __UHSUB8 __iar_builtin_UHSUB8 | ||
441 | #define __SADD16 __iar_builtin_SADD16 | ||
442 | #define __QADD16 __iar_builtin_QADD16 | ||
443 | #define __SHADD16 __iar_builtin_SHADD16 | ||
444 | #define __UADD16 __iar_builtin_UADD16 | ||
445 | #define __UQADD16 __iar_builtin_UQADD16 | ||
446 | #define __UHADD16 __iar_builtin_UHADD16 | ||
447 | #define __SSUB16 __iar_builtin_SSUB16 | ||
448 | #define __QSUB16 __iar_builtin_QSUB16 | ||
449 | #define __SHSUB16 __iar_builtin_SHSUB16 | ||
450 | #define __USUB16 __iar_builtin_USUB16 | ||
451 | #define __UQSUB16 __iar_builtin_UQSUB16 | ||
452 | #define __UHSUB16 __iar_builtin_UHSUB16 | ||
453 | #define __SASX __iar_builtin_SASX | ||
454 | #define __QASX __iar_builtin_QASX | ||
455 | #define __SHASX __iar_builtin_SHASX | ||
456 | #define __UASX __iar_builtin_UASX | ||
457 | #define __UQASX __iar_builtin_UQASX | ||
458 | #define __UHASX __iar_builtin_UHASX | ||
459 | #define __SSAX __iar_builtin_SSAX | ||
460 | #define __QSAX __iar_builtin_QSAX | ||
461 | #define __SHSAX __iar_builtin_SHSAX | ||
462 | #define __USAX __iar_builtin_USAX | ||
463 | #define __UQSAX __iar_builtin_UQSAX | ||
464 | #define __UHSAX __iar_builtin_UHSAX | ||
465 | #define __USAD8 __iar_builtin_USAD8 | ||
466 | #define __USADA8 __iar_builtin_USADA8 | ||
467 | #define __SSAT16 __iar_builtin_SSAT16 | ||
468 | #define __USAT16 __iar_builtin_USAT16 | ||
469 | #define __UXTB16 __iar_builtin_UXTB16 | ||
470 | #define __UXTAB16 __iar_builtin_UXTAB16 | ||
471 | #define __SXTB16 __iar_builtin_SXTB16 | ||
472 | #define __SXTAB16 __iar_builtin_SXTAB16 | ||
473 | #define __SMUAD __iar_builtin_SMUAD | ||
474 | #define __SMUADX __iar_builtin_SMUADX | ||
475 | #define __SMMLA __iar_builtin_SMMLA | ||
476 | #define __SMLAD __iar_builtin_SMLAD | ||
477 | #define __SMLADX __iar_builtin_SMLADX | ||
478 | #define __SMLALD __iar_builtin_SMLALD | ||
479 | #define __SMLALDX __iar_builtin_SMLALDX | ||
480 | #define __SMUSD __iar_builtin_SMUSD | ||
481 | #define __SMUSDX __iar_builtin_SMUSDX | ||
482 | #define __SMLSD __iar_builtin_SMLSD | ||
483 | #define __SMLSDX __iar_builtin_SMLSDX | ||
484 | #define __SMLSLD __iar_builtin_SMLSLD | ||
485 | #define __SMLSLDX __iar_builtin_SMLSLDX | ||
486 | #define __SEL __iar_builtin_SEL | ||
487 | #define __QADD __iar_builtin_QADD | ||
488 | #define __QSUB __iar_builtin_QSUB | ||
489 | #define __PKHBT __iar_builtin_PKHBT | ||
490 | #define __PKHTB __iar_builtin_PKHTB | ||
491 | #endif | ||
492 | |||
493 | #else /* __ICCARM_INTRINSICS_VERSION__ == 2 */ | ||
494 | |||
495 | #if __IAR_M0_FAMILY | ||
496 | /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */ | ||
497 | #define __CLZ __cmsis_iar_clz_not_active | ||
498 | #define __SSAT __cmsis_iar_ssat_not_active | ||
499 | #define __USAT __cmsis_iar_usat_not_active | ||
500 | #define __RBIT __cmsis_iar_rbit_not_active | ||
501 | #define __get_APSR __cmsis_iar_get_APSR_not_active | ||
502 | #endif | ||
503 | |||
504 | |||
505 | #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ | ||
506 | (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) | ||
507 | #define __get_FPSCR __cmsis_iar_get_FPSR_not_active | ||
508 | #define __set_FPSCR __cmsis_iar_set_FPSR_not_active | ||
509 | #endif | ||
510 | |||
511 | #ifdef __INTRINSICS_INCLUDED | ||
512 | #error intrinsics.h is already included previously! | ||
513 | #endif | ||
514 | |||
515 | #include <intrinsics.h> | ||
516 | |||
517 | #if __IAR_M0_FAMILY | ||
518 | /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */ | ||
519 | #undef __CLZ | ||
520 | #undef __SSAT | ||
521 | #undef __USAT | ||
522 | #undef __RBIT | ||
523 | #undef __get_APSR | ||
524 | |||
/* Count leading zeros of a 32-bit value — software fallback for M0-family
   cores that lack the CLZ instruction. Returns 32 for an input of 0. */
static inline uint8_t __CLZ(uint32_t data)  /* expansion of this file's __STATIC_INLINE */
{
  uint8_t  zeros = 0U;
  uint32_t probe = 0x80000000U;

  if (data == 0U)
  {
    return 32U;
  }

  while ((data & probe) == 0U)
  {
    probe >>= 1U;
    zeros++;
  }
  return zeros;
}
539 | |||
/* Reverse the bit order of a 32-bit value — software fallback for M0-family
   cores that lack the RBIT instruction. */
static inline uint32_t __RBIT(uint32_t v)  /* expansion of this file's __STATIC_INLINE */
{
  uint32_t reversed = v;
  uint8_t  shift    = 31U;

  for (v >>= 1U; v != 0U; v >>= 1U)
  {
    reversed = (reversed << 1U) | (v & 1U);
    shift--;
  }
  /* Left-justify the result: 'shift' counts the input bits never visited. */
  return (uint32_t)(reversed << shift);
}
552 | |||
553 | __STATIC_INLINE uint32_t __get_APSR(void) | ||
554 | { | ||
555 | uint32_t res; | ||
556 | __asm("MRS %0,APSR" : "=r" (res)); | ||
557 | return res; | ||
558 | } | ||
559 | |||
560 | #endif | ||
561 | |||
562 | #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ | ||
563 | (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) | ||
564 | #undef __get_FPSCR | ||
565 | #undef __set_FPSCR | ||
566 | #define __get_FPSCR() (0) | ||
567 | #define __set_FPSCR(VALUE) ((void)VALUE) | ||
568 | #endif | ||
569 | |||
570 | #pragma diag_suppress=Pe940 | ||
571 | #pragma diag_suppress=Pe177 | ||
572 | |||
573 | #define __enable_irq __enable_interrupt | ||
574 | #define __disable_irq __disable_interrupt | ||
575 | #define __NOP __no_operation | ||
576 | |||
577 | #define __get_xPSR __get_PSR | ||
578 | |||
/* Exclusive load/store are not available on Armv6-M (Cortex-M0/M0+). */
579 | #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0) | ||
580 | |||
/* Exclusive 32-bit load: adapts CMSIS's uint32_t* to the unsigned long*
   expected by IAR's __LDREX intrinsic. */
581 | __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr) | ||
582 | { | ||
583 | return __LDREX((unsigned long *)ptr); | ||
584 | } | ||
585 | |||
/* Exclusive 32-bit store: returns 0 on success, 1 if the exclusive monitor
   was lost (per the STREX instruction, via IAR's __STREX intrinsic). */
586 | __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr) | ||
587 | { | ||
588 | return __STREX(value, (unsigned long *)ptr); | ||
589 | } | ||
590 | #endif | ||
591 | |||
592 | |||
593 | /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ | ||
594 | #if (__CORTEX_M >= 0x03) | ||
595 | |||
596 | __IAR_FT uint32_t __RRX(uint32_t value) | ||
597 | { | ||
598 | uint32_t result; | ||
599 | __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc"); | ||
600 | return(result); | ||
601 | } | ||
602 | |||
603 | __IAR_FT void __set_BASEPRI_MAX(uint32_t value) | ||
604 | { | ||
605 | __asm volatile("MSR BASEPRI_MAX,%0"::"r" (value)); | ||
606 | } | ||
607 | |||
608 | |||
609 | #define __enable_fault_irq __enable_fiq | ||
610 | #define __disable_fault_irq __disable_fiq | ||
611 | |||
612 | |||
613 | #endif /* (__CORTEX_M >= 0x03) */ | ||
614 | |||
615 | __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2) | ||
616 | { | ||
617 | return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2)); | ||
618 | } | ||
619 | |||
620 | #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ | ||
621 | (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) | ||
622 | |||
623 | __IAR_FT uint32_t __get_MSPLIM(void) | ||
624 | { | ||
625 | uint32_t res; | ||
626 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
627 | (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) | ||
628 | // without main extensions, the non-secure MSPLIM is RAZ/WI | ||
629 | res = 0U; | ||
630 | #else | ||
631 | __asm volatile("MRS %0,MSPLIM" : "=r" (res)); | ||
632 | #endif | ||
633 | return res; | ||
634 | } | ||
635 | |||
636 | __IAR_FT void __set_MSPLIM(uint32_t value) | ||
637 | { | ||
638 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
639 | (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) | ||
640 | // without main extensions, the non-secure MSPLIM is RAZ/WI | ||
641 | (void)value; | ||
642 | #else | ||
643 | __asm volatile("MSR MSPLIM,%0" :: "r" (value)); | ||
644 | #endif | ||
645 | } | ||
646 | |||
647 | __IAR_FT uint32_t __get_PSPLIM(void) | ||
648 | { | ||
649 | uint32_t res; | ||
650 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
651 | (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) | ||
652 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
653 | res = 0U; | ||
654 | #else | ||
655 | __asm volatile("MRS %0,PSPLIM" : "=r" (res)); | ||
656 | #endif | ||
657 | return res; | ||
658 | } | ||
659 | |||
660 | __IAR_FT void __set_PSPLIM(uint32_t value) | ||
661 | { | ||
662 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
663 | (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) | ||
664 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
665 | (void)value; | ||
666 | #else | ||
667 | __asm volatile("MSR PSPLIM,%0" :: "r" (value)); | ||
668 | #endif | ||
669 | } | ||
670 | |||
671 | __IAR_FT uint32_t __TZ_get_CONTROL_NS(void) | ||
672 | { | ||
673 | uint32_t res; | ||
674 | __asm volatile("MRS %0,CONTROL_NS" : "=r" (res)); | ||
675 | return res; | ||
676 | } | ||
677 | |||
678 | __IAR_FT void __TZ_set_CONTROL_NS(uint32_t value) | ||
679 | { | ||
680 | __asm volatile("MSR CONTROL_NS,%0" :: "r" (value)); | ||
681 | } | ||
682 | |||
683 | __IAR_FT uint32_t __TZ_get_PSP_NS(void) | ||
684 | { | ||
685 | uint32_t res; | ||
686 | __asm volatile("MRS %0,PSP_NS" : "=r" (res)); | ||
687 | return res; | ||
688 | } | ||
689 | |||
690 | __IAR_FT void __TZ_set_PSP_NS(uint32_t value) | ||
691 | { | ||
692 | __asm volatile("MSR PSP_NS,%0" :: "r" (value)); | ||
693 | } | ||
694 | |||
695 | __IAR_FT uint32_t __TZ_get_MSP_NS(void) | ||
696 | { | ||
697 | uint32_t res; | ||
698 | __asm volatile("MRS %0,MSP_NS" : "=r" (res)); | ||
699 | return res; | ||
700 | } | ||
701 | |||
702 | __IAR_FT void __TZ_set_MSP_NS(uint32_t value) | ||
703 | { | ||
704 | __asm volatile("MSR MSP_NS,%0" :: "r" (value)); | ||
705 | } | ||
706 | |||
707 | __IAR_FT uint32_t __TZ_get_SP_NS(void) | ||
708 | { | ||
709 | uint32_t res; | ||
710 | __asm volatile("MRS %0,SP_NS" : "=r" (res)); | ||
711 | return res; | ||
712 | } | ||
713 | __IAR_FT void __TZ_set_SP_NS(uint32_t value) | ||
714 | { | ||
715 | __asm volatile("MSR SP_NS,%0" :: "r" (value)); | ||
716 | } | ||
717 | |||
718 | __IAR_FT uint32_t __TZ_get_PRIMASK_NS(void) | ||
719 | { | ||
720 | uint32_t res; | ||
721 | __asm volatile("MRS %0,PRIMASK_NS" : "=r" (res)); | ||
722 | return res; | ||
723 | } | ||
724 | |||
725 | __IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value) | ||
726 | { | ||
727 | __asm volatile("MSR PRIMASK_NS,%0" :: "r" (value)); | ||
728 | } | ||
729 | |||
730 | __IAR_FT uint32_t __TZ_get_BASEPRI_NS(void) | ||
731 | { | ||
732 | uint32_t res; | ||
733 | __asm volatile("MRS %0,BASEPRI_NS" : "=r" (res)); | ||
734 | return res; | ||
735 | } | ||
736 | |||
737 | __IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value) | ||
738 | { | ||
739 | __asm volatile("MSR BASEPRI_NS,%0" :: "r" (value)); | ||
740 | } | ||
741 | |||
742 | __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void) | ||
743 | { | ||
744 | uint32_t res; | ||
745 | __asm volatile("MRS %0,FAULTMASK_NS" : "=r" (res)); | ||
746 | return res; | ||
747 | } | ||
748 | |||
749 | __IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value) | ||
750 | { | ||
751 | __asm volatile("MSR FAULTMASK_NS,%0" :: "r" (value)); | ||
752 | } | ||
753 | |||
754 | __IAR_FT uint32_t __TZ_get_PSPLIM_NS(void) | ||
755 | { | ||
756 | uint32_t res; | ||
757 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
758 | (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) | ||
759 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
760 | res = 0U; | ||
761 | #else | ||
762 | __asm volatile("MRS %0,PSPLIM_NS" : "=r" (res)); | ||
763 | #endif | ||
764 | return res; | ||
765 | } | ||
766 | |||
767 | __IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value) | ||
768 | { | ||
769 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ | ||
770 | (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) | ||
771 | // without main extensions, the non-secure PSPLIM is RAZ/WI | ||
772 | (void)value; | ||
773 | #else | ||
774 | __asm volatile("MSR PSPLIM_NS,%0" :: "r" (value)); | ||
775 | #endif | ||
776 | } | ||
777 | |||
778 | __IAR_FT uint32_t __TZ_get_MSPLIM_NS(void) | ||
779 | { | ||
780 | uint32_t res; | ||
781 | __asm volatile("MRS %0,MSPLIM_NS" : "=r" (res)); | ||
782 | return res; | ||
783 | } | ||
784 | |||
785 | __IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value) | ||
786 | { | ||
787 | __asm volatile("MSR MSPLIM_NS,%0" :: "r" (value)); | ||
788 | } | ||
789 | |||
790 | #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */ | ||
791 | |||
792 | #endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */ | ||
793 | |||
794 | #define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value)) | ||
795 | |||
796 | #if __IAR_M0_FAMILY | ||
/* Signed saturate 'val' to a signed bit width of 'sat' (1..32) — software
   fallback for M0-family cores. An out-of-range 'sat' returns the value
   unchanged, matching the original. */
static inline int32_t __SSAT(int32_t val, uint32_t sat)  /* expansion of this file's __STATIC_INLINE */
{
  if ((sat < 1U) || (sat > 32U))
  {
    return val;  /* invalid width: pass through */
  }

  const int32_t upper = (int32_t)((1U << (sat - 1U)) - 1U);
  const int32_t lower = -1 - upper;

  if (val > upper)
  {
    return upper;
  }
  if (val < lower)
  {
    return lower;
  }
  return val;
}
814 | |||
/* Unsigned saturate 'val' to an unsigned bit width of 'sat' (0..31) —
   software fallback for M0-family cores. For sat >= 32 the value is
   returned unchanged (cast to unsigned), matching the original. */
static inline uint32_t __USAT(int32_t val, uint32_t sat)  /* expansion of this file's __STATIC_INLINE */
{
  if (sat <= 31U)
  {
    const uint32_t ceiling = (1U << sat) - 1U;

    if (val < 0)
    {
      return 0U;
    }
    if ((uint32_t)val > ceiling)
    {
      return ceiling;
    }
  }
  return (uint32_t)val;
}
831 | #endif | ||
832 | |||
833 | #if (__CORTEX_M >= 0x03) /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ | ||
834 | |||
835 | __IAR_FT uint8_t __LDRBT(volatile uint8_t *addr) | ||
836 | { | ||
837 | uint32_t res; | ||
838 | __ASM("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory"); | ||
839 | return ((uint8_t)res); | ||
840 | } | ||
841 | |||
842 | __IAR_FT uint16_t __LDRHT(volatile uint16_t *addr) | ||
843 | { | ||
844 | uint32_t res; | ||
845 | __ASM("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory"); | ||
846 | return ((uint16_t)res); | ||
847 | } | ||
848 | |||
849 | __IAR_FT uint32_t __LDRT(volatile uint32_t *addr) | ||
850 | { | ||
851 | uint32_t res; | ||
852 | __ASM("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory"); | ||
853 | return res; | ||
854 | } | ||
855 | |||
856 | __IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr) | ||
857 | { | ||
858 | __ASM("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory"); | ||
859 | } | ||
860 | |||
861 | __IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr) | ||
862 | { | ||
863 | __ASM("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory"); | ||
864 | } | ||
865 | |||
866 | __IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr) | ||
867 | { | ||
868 | __ASM("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory"); | ||
869 | } | ||
870 | |||
871 | #endif /* (__CORTEX_M >= 0x03) */ | ||
872 | |||
873 | #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ | ||
874 | (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) | ||
875 | |||
876 | |||
877 | __IAR_FT uint8_t __LDAB(volatile uint8_t *ptr) | ||
878 | { | ||
879 | uint32_t res; | ||
880 | __ASM volatile ("LDAB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory"); | ||
881 | return ((uint8_t)res); | ||
882 | } | ||
883 | |||
884 | __IAR_FT uint16_t __LDAH(volatile uint16_t *ptr) | ||
885 | { | ||
886 | uint32_t res; | ||
887 | __ASM volatile ("LDAH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory"); | ||
888 | return ((uint16_t)res); | ||
889 | } | ||
890 | |||
891 | __IAR_FT uint32_t __LDA(volatile uint32_t *ptr) | ||
892 | { | ||
893 | uint32_t res; | ||
894 | __ASM volatile ("LDA %0, [%1]" : "=r" (res) : "r" (ptr) : "memory"); | ||
895 | return res; | ||
896 | } | ||
897 | |||
898 | __IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr) | ||
899 | { | ||
900 | __ASM volatile ("STLB %1, [%0]" :: "r" (ptr), "r" (value) : "memory"); | ||
901 | } | ||
902 | |||
903 | __IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr) | ||
904 | { | ||
905 | __ASM volatile ("STLH %1, [%0]" :: "r" (ptr), "r" (value) : "memory"); | ||
906 | } | ||
907 | |||
908 | __IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr) | ||
909 | { | ||
910 | __ASM volatile ("STL %1, [%0]" :: "r" (ptr), "r" (value) : "memory"); | ||
911 | } | ||
912 | |||
913 | __IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr) | ||
914 | { | ||
915 | uint32_t res; | ||
916 | __ASM volatile ("LDAEXB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory"); | ||
917 | return ((uint8_t)res); | ||
918 | } | ||
919 | |||
920 | __IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr) | ||
921 | { | ||
922 | uint32_t res; | ||
923 | __ASM volatile ("LDAEXH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory"); | ||
924 | return ((uint16_t)res); | ||
925 | } | ||
926 | |||
927 | __IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr) | ||
928 | { | ||
929 | uint32_t res; | ||
930 | __ASM volatile ("LDAEX %0, [%1]" : "=r" (res) : "r" (ptr) : "memory"); | ||
931 | return res; | ||
932 | } | ||
933 | |||
934 | __IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr) | ||
935 | { | ||
936 | uint32_t res; | ||
937 | __ASM volatile ("STLEXB %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory"); | ||
938 | return res; | ||
939 | } | ||
940 | |||
941 | __IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr) | ||
942 | { | ||
943 | uint32_t res; | ||
944 | __ASM volatile ("STLEXH %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory"); | ||
945 | return res; | ||
946 | } | ||
947 | |||
948 | __IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr) | ||
949 | { | ||
950 | uint32_t res; | ||
951 | __ASM volatile ("STLEX %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory"); | ||
952 | return res; | ||
953 | } | ||
954 | |||
955 | #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */ | ||
956 | |||
957 | #undef __IAR_FT | ||
958 | #undef __IAR_M0_FAMILY | ||
959 | #undef __ICCARM_V8 | ||
960 | |||
961 | #pragma diag_default=Pe940 | ||
962 | #pragma diag_default=Pe177 | ||
963 | |||
964 | #endif /* __CMSIS_ICCARM_H__ */ | ||