comparison l476rg/Drivers/CMSIS/Include/cmsis_armcc.h @ 0:32a3b1785697

a rough draft of a Hardware Abstraction Layer for C++ STM32L476RG drivers
author cin
date Thu, 12 Jan 2017 02:45:43 +0300
1 /**************************************************************************//**
2 * @file cmsis_armcc.h
3 * @brief CMSIS Cortex-M Core Function/Instruction Header File
4 * @version V4.30
5 * @date 20. October 2015
6 ******************************************************************************/
7 /* Copyright (c) 2009 - 2015 ARM LIMITED
8
9 All rights reserved.
10 Redistribution and use in source and binary forms, with or without
11 modification, are permitted provided that the following conditions are met:
12 - Redistributions of source code must retain the above copyright
13 notice, this list of conditions and the following disclaimer.
14 - Redistributions in binary form must reproduce the above copyright
15 notice, this list of conditions and the following disclaimer in the
16 documentation and/or other materials provided with the distribution.
17 - Neither the name of ARM nor the names of its contributors may be used
18 to endorse or promote products derived from this software without
19 specific prior written permission.
20 *
21 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
24 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
25 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
27 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
28 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
29 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
31 POSSIBILITY OF SUCH DAMAGE.
32 ---------------------------------------------------------------------------*/
33
34
35 #ifndef __CMSIS_ARMCC_H
36 #define __CMSIS_ARMCC_H
37
38
39 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
40 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
41 #endif
42
43 /* ########################### Core Function Access ########################### */
44 /** \ingroup CMSIS_Core_FunctionInterface
45 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
46 @{
47 */
48
49 /* intrinsic void __enable_irq(); */
50 /* intrinsic void __disable_irq(); */
51
52 /**
53 \brief Get Control Register
54 \details Returns the content of the Control Register.
55 \return Control Register value
56 */
57 __STATIC_INLINE uint32_t __get_CONTROL(void)
58 {
59 register uint32_t __regControl __ASM("control");
60 return(__regControl);
61 }
62
63
64 /**
65 \brief Set Control Register
66 \details Writes the given value to the Control Register.
67 \param [in] control Control Register value to set
68 */
69 __STATIC_INLINE void __set_CONTROL(uint32_t control)
70 {
71 register uint32_t __regControl __ASM("control");
72 __regControl = control;
73 }
74
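/* Usage sketch: a hypothetical helper that reads the CONTROL register to test
   whether Thread mode runs unprivileged. CONTROL.nPRIV is bit 0 on Cortex-M3/M4. */
__STATIC_INLINE uint32_t example_is_unprivileged(void)
{
  return (__get_CONTROL() & 0x01U);        /* 1U when Thread mode is unprivileged */
}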
75
76 /**
77 \brief Get IPSR Register
78 \details Returns the content of the IPSR Register.
79 \return IPSR Register value
80 */
81 __STATIC_INLINE uint32_t __get_IPSR(void)
82 {
83 register uint32_t __regIPSR __ASM("ipsr");
84 return(__regIPSR);
85 }
86
87
88 /**
89 \brief Get APSR Register
90 \details Returns the content of the APSR Register.
91 \return APSR Register value
92 */
93 __STATIC_INLINE uint32_t __get_APSR(void)
94 {
95 register uint32_t __regAPSR __ASM("apsr");
96 return(__regAPSR);
97 }
98
99
100 /**
101 \brief Get xPSR Register
102 \details Returns the content of the xPSR Register.
103 \return xPSR Register value
104 */
105 __STATIC_INLINE uint32_t __get_xPSR(void)
106 {
107 register uint32_t __regXPSR __ASM("xpsr");
108 return(__regXPSR);
109 }
110
111
112 /**
113 \brief Get Process Stack Pointer
114 \details Returns the current value of the Process Stack Pointer (PSP).
115 \return PSP Register value
116 */
117 __STATIC_INLINE uint32_t __get_PSP(void)
118 {
119 register uint32_t __regProcessStackPointer __ASM("psp");
120 return(__regProcessStackPointer);
121 }
122
123
124 /**
125 \brief Set Process Stack Pointer
126 \details Assigns the given value to the Process Stack Pointer (PSP).
127 \param [in] topOfProcStack Process Stack Pointer value to set
128 */
129 __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
130 {
131 register uint32_t __regProcessStackPointer __ASM("psp");
132 __regProcessStackPointer = topOfProcStack;
133 }
134
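/* Usage sketch: a hypothetical RTOS-style switch of Thread mode onto the process
   stack, assuming 'stack_top' is the top address of an allocated task stack and
   the caller runs in privileged Thread mode. A real implementation would follow
   the CONTROL write with __ISB() (defined further below in this file). */
__STATIC_INLINE void example_switch_to_psp(uint32_t stack_top)
{
  __set_PSP(stack_top);                    /* load the process stack pointer     */
  __set_CONTROL(__get_CONTROL() | 0x02U);  /* CONTROL.SPSEL = 1: use PSP         */
}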
135
136 /**
137 \brief Get Main Stack Pointer
138 \details Returns the current value of the Main Stack Pointer (MSP).
139 \return MSP Register value
140 */
141 __STATIC_INLINE uint32_t __get_MSP(void)
142 {
143 register uint32_t __regMainStackPointer __ASM("msp");
144 return(__regMainStackPointer);
145 }
146
147
148 /**
149 \brief Set Main Stack Pointer
150 \details Assigns the given value to the Main Stack Pointer (MSP).
151 \param [in] topOfMainStack Main Stack Pointer value to set
152 */
153 __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
154 {
155 register uint32_t __regMainStackPointer __ASM("msp");
156 __regMainStackPointer = topOfMainStack;
157 }
158
159
160 /**
161 \brief Get Priority Mask
162 \details Returns the current state of the priority mask bit from the Priority Mask Register.
163 \return Priority Mask value
164 */
165 __STATIC_INLINE uint32_t __get_PRIMASK(void)
166 {
167 register uint32_t __regPriMask __ASM("primask");
168 return(__regPriMask);
169 }
170
171
172 /**
173 \brief Set Priority Mask
174 \details Assigns the given value to the Priority Mask Register.
175 \param [in] priMask Priority Mask
176 */
177 __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
178 {
179 register uint32_t __regPriMask __ASM("primask");
180 __regPriMask = (priMask);
181 }
182
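/* Usage sketch: a hypothetical nesting-safe critical section built on PRIMASK.
   The previous mask is returned so the caller restores it instead of
   unconditionally re-enabling interrupts. */
__STATIC_INLINE uint32_t example_enter_critical(void)
{
  uint32_t primask = __get_PRIMASK();      /* remember the current masking state */
  __disable_irq();                         /* set PRIMASK (ARMCC intrinsic)      */
  return primask;
}

__STATIC_INLINE void example_exit_critical(uint32_t primask)
{
  __set_PRIMASK(primask);                  /* restore the saved masking state    */
}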
183
184 #if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
185
186 /**
187 \brief Enable FIQ
188 \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
189 Can only be executed in Privileged modes.
190 */
191 #define __enable_fault_irq __enable_fiq
192
193
194 /**
195 \brief Disable FIQ
196 \details Disables FIQ interrupts by setting the F-bit in the CPSR.
197 Can only be executed in Privileged modes.
198 */
199 #define __disable_fault_irq __disable_fiq
200
201
202 /**
203 \brief Get Base Priority
204 \details Returns the current value of the Base Priority register.
205 \return Base Priority register value
206 */
207 __STATIC_INLINE uint32_t __get_BASEPRI(void)
208 {
209 register uint32_t __regBasePri __ASM("basepri");
210 return(__regBasePri);
211 }
212
213
214 /**
215 \brief Set Base Priority
216 \details Assigns the given value to the Base Priority register.
217 \param [in] basePri Base Priority value to set
218 */
219 __STATIC_INLINE void __set_BASEPRI(uint32_t basePri)
220 {
221 register uint32_t __regBasePri __ASM("basepri");
222 __regBasePri = (basePri & 0xFFU);
223 }
224
225
226 /**
227 \brief Set Base Priority with condition
228 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
229 or the new value increases the BASEPRI priority level.
230 \param [in] basePri Base Priority value to set
231 */
232 __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t basePri)
233 {
234 register uint32_t __regBasePriMax __ASM("basepri_max");
235 __regBasePriMax = (basePri & 0xFFU);
236 }
237
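/* Usage sketch: mask all interrupts below a given preemption priority while
   leaving higher-priority interrupts enabled. The shift assumes a device with
   4 NVIC priority bits (as on STM32L4); adjust for other parts. Hypothetical helper. */
__STATIC_INLINE uint32_t example_raise_basepri(uint32_t preempt_prio)
{
  uint32_t previous = __get_BASEPRI();
  __set_BASEPRI_MAX(preempt_prio << (8U - 4U));  /* only ever raises the mask     */
  return previous;                               /* restore with __set_BASEPRI()  */
}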
238
239 /**
240 \brief Get Fault Mask
241 \details Returns the current value of the Fault Mask register.
242 \return Fault Mask register value
243 */
244 __STATIC_INLINE uint32_t __get_FAULTMASK(void)
245 {
246 register uint32_t __regFaultMask __ASM("faultmask");
247 return(__regFaultMask);
248 }
249
250
251 /**
252 \brief Set Fault Mask
253 \details Assigns the given value to the Fault Mask register.
254 \param [in] faultMask Fault Mask value to set
255 */
256 __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
257 {
258 register uint32_t __regFaultMask __ASM("faultmask");
259 __regFaultMask = (faultMask & (uint32_t)1);
260 }
261
262 #endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */
263
264
265 #if (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)
266
267 /**
268 \brief Get FPSCR
269 \details Returns the current value of the Floating Point Status/Control register.
270 \return Floating Point Status/Control register value
271 */
272 __STATIC_INLINE uint32_t __get_FPSCR(void)
273 {
274 #if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
275 register uint32_t __regfpscr __ASM("fpscr");
276 return(__regfpscr);
277 #else
278 return(0U);
279 #endif
280 }
281
282
283 /**
284 \brief Set FPSCR
285 \details Assigns the given value to the Floating Point Status/Control register.
286 \param [in] fpscr Floating Point Status/Control value to set
287 */
288 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
289 {
290 #if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
291 register uint32_t __regfpscr __ASM("fpscr");
292 __regfpscr = (fpscr);
293 #endif
294 }
295
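/* Usage sketch: clear the five cumulative floating-point exception flags
   (IOC, DZC, OFC, UFC, IXC in FPSCR[4:0]), e.g. before running a routine whose
   exceptions are to be checked afterwards. Hypothetical helper. */
__STATIC_INLINE void example_clear_fp_exceptions(void)
{
  __set_FPSCR(__get_FPSCR() & ~0x1FU);
}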
296 #endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */
297
298
299
300 /*@} end of CMSIS_Core_RegAccFunctions */
301
302
303 /* ########################## Core Instruction Access ######################### */
304 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
305 Access to dedicated instructions
306 @{
307 */
308
309 /**
310 \brief No Operation
311 \details No Operation does nothing. This instruction can be used for code alignment purposes.
312 */
313 #define __NOP __nop
314
315
316 /**
317 \brief Wait For Interrupt
318 \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
319 */
320 #define __WFI __wfi
321
322
323 /**
324 \brief Wait For Event
325 \details Wait For Event is a hint instruction that permits the processor to enter
326 a low-power state until one of a number of events occurs.
327 */
328 #define __WFE __wfe
329
330
331 /**
332 \brief Send Event
333 \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
334 */
335 #define __SEV __sev
336
337
338 /**
339 \brief Instruction Synchronization Barrier
340 \details Instruction Synchronization Barrier flushes the pipeline in the processor,
341 so that all instructions following the ISB are fetched from cache or memory,
342 after the instruction has been completed.
343 */
344 #define __ISB() do {\
345 __schedule_barrier();\
346 __isb(0xF);\
347 __schedule_barrier();\
348 } while (0U)
349
350 /**
351 \brief Data Synchronization Barrier
352 \details Acts as a special kind of Data Memory Barrier.
353 It completes when all explicit memory accesses before this instruction complete.
354 */
355 #define __DSB() do {\
356 __schedule_barrier();\
357 __dsb(0xF);\
358 __schedule_barrier();\
359 } while (0U)
360
361 /**
362 \brief Data Memory Barrier
363 \details Ensures the apparent order of the explicit memory operations before
364 and after the instruction, without ensuring their completion.
365 */
366 #define __DMB() do {\
367 __schedule_barrier();\
368 __dmb(0xF);\
369 __schedule_barrier();\
370 } while (0U)
371
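/* Usage sketch: a typical barrier sequence around entering sleep, assuming
   outstanding memory writes (e.g. to peripheral registers) must complete before
   the core stops executing. Hypothetical helper. */
__STATIC_INLINE void example_enter_sleep(void)
{
  __DSB();   /* complete all outstanding memory accesses before sleeping */
  __WFI();   /* wait for an interrupt                                    */
  __ISB();   /* flush the pipeline after wake-up                         */
}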
372 /**
373 \brief Reverse byte order (32 bit)
374 \details Reverses the byte order in an integer value.
375 \param [in] value Value to reverse
376 \return Reversed value
377 */
378 #define __REV __rev
379
380
381 /**
382 \brief Reverse byte order (16 bit)
383 \details Reverses the byte order in two unsigned short values.
384 \param [in] value Value to reverse
385 \return Reversed value
386 */
387 #ifndef __NO_EMBEDDED_ASM
388 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
389 {
390 rev16 r0, r0
391 bx lr
392 }
393 #endif
394
395 /**
396 \brief Reverse byte order in signed short value
397 \details Reverses the byte order in a signed short value with sign extension to integer.
398 \param [in] value Value to reverse
399 \return Reversed value
400 */
401 #ifndef __NO_EMBEDDED_ASM
402 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
403 {
404 revsh r0, r0
405 bx lr
406 }
407 #endif
408
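/* Usage sketch: host to big-endian (network order) conversion on a little-endian
   Cortex-M, assuming the embedded-assembly helpers above are available
   (__NO_EMBEDDED_ASM not defined). Hypothetical helpers. */
__STATIC_INLINE uint32_t example_htonl(uint32_t host32)
{
  return __REV(host32);                            /* swap all four bytes        */
}

__STATIC_INLINE uint16_t example_htons(uint16_t host16)
{
  return (uint16_t)(__REV16(host16) & 0xFFFFU);    /* swap bytes of low halfword */
}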
409
410 /**
411 \brief Rotate Right in unsigned value (32 bit)
412 \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
413 \param [in] value Value to rotate
414 \param [in] shift Number of bits to rotate
415 \return Rotated value
416 */
417 #define __ROR __ror
418
419
420 /**
421 \brief Breakpoint
422 \details Causes the processor to enter Debug state.
423 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
424 \param [in] value is ignored by the processor.
425 If required, a debugger can use it to store additional information about the breakpoint.
426 */
427 #define __BKPT(value) __breakpoint(value)
428
429
430 /**
431 \brief Reverse bit order of value
432 \details Reverses the bit order of the given value.
433 \param [in] value Value to reverse
434 \return Reversed value
435 */
436 #if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
437 #define __RBIT __rbit
438 #else
439 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
440 {
441 uint32_t result;
442 int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */
443
444 result = value; /* r will be reversed bits of v; first get LSB of v */
445 for (value >>= 1U; value; value >>= 1U)
446 {
447 result <<= 1U;
448 result |= value & 1U;
449 s--;
450 }
451 result <<= s; /* shift when v's highest bits are zero */
452 return(result);
453 }
454 #endif
455
456
457 /**
458 \brief Count leading zeros
459 \details Counts the number of leading zeros of a data value.
460 \param [in] value Value to count the leading zeros
461 \return number of leading zeros in value
462 */
463 #define __CLZ __clz
464
465
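/* Usage sketch: count trailing zeros (the index of the lowest set bit) by
   combining bit-reverse and count-leading-zeros; returns 32 for an input of
   zero. Hypothetical helper. */
__STATIC_INLINE uint32_t example_ctz(uint32_t value)
{
  return __CLZ(__RBIT(value));
}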
466 #if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
467
468 /**
469 \brief LDR Exclusive (8 bit)
470 \details Executes an exclusive LDR instruction for 8 bit values.
471 \param [in] ptr Pointer to data
472 \return value of type uint8_t at (*ptr)
473 */
474 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
475 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
476 #else
477 #define __LDREXB(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr)) _Pragma("pop")
478 #endif
479
480
481 /**
482 \brief LDR Exclusive (16 bit)
483 \details Executes an exclusive LDR instruction for 16 bit values.
484 \param [in] ptr Pointer to data
485 \return value of type uint16_t at (*ptr)
486 */
487 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
488 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
489 #else
490 #define __LDREXH(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr)) _Pragma("pop")
491 #endif
492
493
494 /**
495 \brief LDR Exclusive (32 bit)
496 \details Executes an exclusive LDR instruction for 32 bit values.
497 \param [in] ptr Pointer to data
498 \return value of type uint32_t at (*ptr)
499 */
500 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
501 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
502 #else
503 #define __LDREXW(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr)) _Pragma("pop")
504 #endif
505
506
507 /**
508 \brief STR Exclusive (8 bit)
509 \details Executes an exclusive STR instruction for 8 bit values.
510 \param [in] value Value to store
511 \param [in] ptr Pointer to location
512 \return 0 Function succeeded
513 \return 1 Function failed
514 */
515 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
516 #define __STREXB(value, ptr) __strex(value, ptr)
517 #else
518 #define __STREXB(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
519 #endif
520
521
522 /**
523 \brief STR Exclusive (16 bit)
524 \details Executes an exclusive STR instruction for 16 bit values.
525 \param [in] value Value to store
526 \param [in] ptr Pointer to location
527 \return 0 Function succeeded
528 \return 1 Function failed
529 */
530 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
531 #define __STREXH(value, ptr) __strex(value, ptr)
532 #else
533 #define __STREXH(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
534 #endif
535
536
537 /**
538 \brief STR Exclusive (32 bit)
539 \details Executes an exclusive STR instruction for 32 bit values.
540 \param [in] value Value to store
541 \param [in] ptr Pointer to location
542 \return 0 Function succeeded
543 \return 1 Function failed
544 */
545 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
546 #define __STREXW(value, ptr) __strex(value, ptr)
547 #else
548 #define __STREXW(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
549 #endif
550
551
552 /**
553 \brief Remove the exclusive lock
554 \details Removes the exclusive lock which is created by LDREX.
555 */
556 #define __CLREX __clrex
557
558
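/* Usage sketch: a lock-free increment of a shared counter using the
   exclusive-access intrinsics above. __STREXW returns 0 on success and 1 if the
   exclusive monitor was lost, in which case the sequence is retried. */
__STATIC_INLINE uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;
  do
  {
    value = __LDREXW(counter) + 1U;            /* exclusive load, compute new value */
  } while (__STREXW(value, counter) != 0U);    /* retry until the store succeeds    */
  __DMB();                                     /* order against later accesses      */
  return value;
}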
559 /**
560 \brief Signed Saturate
561 \details Saturates a signed value.
562 \param [in] value Value to be saturated
563 \param [in] sat Bit position to saturate to (1..32)
564 \return Saturated value
565 */
566 #define __SSAT __ssat
567
568
569 /**
570 \brief Unsigned Saturate
571 \details Saturates an unsigned value.
572 \param [in] value Value to be saturated
573 \param [in] sat Bit position to saturate to (0..31)
574 \return Saturated value
575 */
576 #define __USAT __usat
577
578
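/* Usage sketch: clip a signed intermediate result into a 12-bit DAC range with
   the saturating intrinsics above; out-of-range values are clamped to 0..4095
   instead of wrapping. Hypothetical helper. */
__STATIC_INLINE uint32_t example_clip_to_12bit(int32_t sample)
{
  return __USAT(sample, 12U);                  /* unsigned saturate to 12 bits */
}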
579 /**
580 \brief Rotate Right with Extend (32 bit)
581 \details Moves each bit of a bitstring right by one bit.
582 The carry input is shifted in at the left end of the bitstring.
583 \param [in] value Value to rotate
584 \return Rotated value
585 */
586 #ifndef __NO_EMBEDDED_ASM
587 __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
588 {
589 rrx r0, r0
590 bx lr
591 }
592 #endif
593
594
595 /**
596 \brief LDRT Unprivileged (8 bit)
597 \details Executes an Unprivileged LDRT instruction for 8 bit values.
598 \param [in] ptr Pointer to data
599 \return value of type uint8_t at (*ptr)
600 */
601 #define __LDRBT(ptr) ((uint8_t ) __ldrt(ptr))
602
603
604 /**
605 \brief LDRT Unprivileged (16 bit)
606 \details Executes an Unprivileged LDRT instruction for 16 bit values.
607 \param [in] ptr Pointer to data
608 \return value of type uint16_t at (*ptr)
609 */
610 #define __LDRHT(ptr) ((uint16_t) __ldrt(ptr))
611
612
613 /**
614 \brief LDRT Unprivileged (32 bit)
615 \details Executes an Unprivileged LDRT instruction for 32 bit values.
616 \param [in] ptr Pointer to data
617 \return value of type uint32_t at (*ptr)
618 */
619 #define __LDRT(ptr) ((uint32_t ) __ldrt(ptr))
620
621
622 /**
623 \brief STRT Unprivileged (8 bit)
624 \details Executes an Unprivileged STRT instruction for 8 bit values.
625 \param [in] value Value to store
626 \param [in] ptr Pointer to location
627 */
628 #define __STRBT(value, ptr) __strt(value, ptr)
629
630
631 /**
632 \brief STRT Unprivileged (16 bit)
634 \details Executes an Unprivileged STRT instruction for 16 bit values.
634 \param [in] value Value to store
635 \param [in] ptr Pointer to location
636 */
637 #define __STRHT(value, ptr) __strt(value, ptr)
638
639
640 /**
641 \brief STRT Unprivileged (32 bit)
643 \details Executes an Unprivileged STRT instruction for 32 bit values.
643 \param [in] value Value to store
644 \param [in] ptr Pointer to location
645 */
646 #define __STRT(value, ptr) __strt(value, ptr)
647
648 #endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */
649
650 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
651
652
653 /* ################### Compiler specific Intrinsics ########################### */
654 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
655 Access to dedicated SIMD instructions
656 @{
657 */
658
659 #if (__CORTEX_M >= 0x04U) /* only for Cortex-M4 and above */
660
661 #define __SADD8 __sadd8
662 #define __QADD8 __qadd8
663 #define __SHADD8 __shadd8
664 #define __UADD8 __uadd8
665 #define __UQADD8 __uqadd8
666 #define __UHADD8 __uhadd8
667 #define __SSUB8 __ssub8
668 #define __QSUB8 __qsub8
669 #define __SHSUB8 __shsub8
670 #define __USUB8 __usub8
671 #define __UQSUB8 __uqsub8
672 #define __UHSUB8 __uhsub8
673 #define __SADD16 __sadd16
674 #define __QADD16 __qadd16
675 #define __SHADD16 __shadd16
676 #define __UADD16 __uadd16
677 #define __UQADD16 __uqadd16
678 #define __UHADD16 __uhadd16
679 #define __SSUB16 __ssub16
680 #define __QSUB16 __qsub16
681 #define __SHSUB16 __shsub16
682 #define __USUB16 __usub16
683 #define __UQSUB16 __uqsub16
684 #define __UHSUB16 __uhsub16
685 #define __SASX __sasx
686 #define __QASX __qasx
687 #define __SHASX __shasx
688 #define __UASX __uasx
689 #define __UQASX __uqasx
690 #define __UHASX __uhasx
691 #define __SSAX __ssax
692 #define __QSAX __qsax
693 #define __SHSAX __shsax
694 #define __USAX __usax
695 #define __UQSAX __uqsax
696 #define __UHSAX __uhsax
697 #define __USAD8 __usad8
698 #define __USADA8 __usada8
699 #define __SSAT16 __ssat16
700 #define __USAT16 __usat16
701 #define __UXTB16 __uxtb16
702 #define __UXTAB16 __uxtab16
703 #define __SXTB16 __sxtb16
704 #define __SXTAB16 __sxtab16
705 #define __SMUAD __smuad
706 #define __SMUADX __smuadx
707 #define __SMLAD __smlad
708 #define __SMLADX __smladx
709 #define __SMLALD __smlald
710 #define __SMLALDX __smlaldx
711 #define __SMUSD __smusd
712 #define __SMUSDX __smusdx
713 #define __SMLSD __smlsd
714 #define __SMLSDX __smlsdx
715 #define __SMLSLD __smlsld
716 #define __SMLSLDX __smlsldx
717 #define __SEL __sel
718 #define __QADD __qadd
719 #define __QSUB __qsub
720
721 #define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
722 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
723
724 #define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
725 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
726
727 #define __SMMLA(ARG1,ARG2,ARG3) ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
728 ((int64_t)(ARG3) << 32U) ) >> 32U))
729
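/* Usage sketch: mix two pairs of packed Q15 samples with per-halfword signed
   saturation, assuming each word holds two 16-bit samples packed e.g. with
   __PKHBT. Hypothetical helper. */
__STATIC_INLINE uint32_t example_mix_q15_pair(uint32_t packed_a, uint32_t packed_b)
{
  return __QADD16(packed_a, packed_b);         /* saturating add per halfword */
}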
730 #endif /* (__CORTEX_M >= 0x04) */
731 /*@} end of group CMSIS_SIMD_intrinsics */
732
733
734 #endif /* __CMSIS_ARMCC_H */