Ви не можете вибрати більше 25 тем Теми мають розпочинатися з літери або цифри, можуть містити дефіси (-) і не повинні перевищувати 35 символів.

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374
  1. /**************************************************************************//**
  2. * @file core_cmInstr.h
  3. * @brief CMSIS Cortex-M Core Instruction Access Header File
  4. * @version V3.01
  5. * @date 06. March 2012
  6. *
  7. * @note
  8. * Copyright (C) 2009-2012 ARM Limited. All rights reserved.
  9. *
  10. * @par
  11. * ARM Limited (ARM) is supplying this software for use with Cortex-M
  12. * processor based microcontrollers. This file can be freely distributed
  13. * within development tools that are supporting such ARM based processors.
  14. *
  15. * @par
  16. * THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
  17. * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
  18. * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
  19. * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
  20. * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
  21. *
  22. ******************************************************************************/
  23. #ifndef __CORE_CMINSTR_H
  24. #define __CORE_CMINSTR_H
  25. /* ########################## Core Instruction Access ######################### */
  26. /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  27. Access to dedicated instructions
  28. @{
  29. */
  30. #if defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
  31. /* GNU gcc specific functions */
/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code
    alignment purposes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}
/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}
/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}
/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled
    to the CPU.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}
  62. /** \brief Instruction Synchronization Barrier
  63. Instruction Synchronization Barrier flushes the pipeline in the processor,
  64. so that all instructions following the ISB are fetched from cache or
  65. memory, after the instruction has been completed.
  66. */
  67. __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
  68. {
  69. __ASM volatile ("isb");
  70. }
  71. /** \brief Data Synchronization Barrier
  72. This function acts as a special kind of Data Memory Barrier.
  73. It completes when all explicit memory accesses before this instruction complete.
  74. */
  75. __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
  76. {
  77. __ASM volatile ("dsb");
  78. }
  79. /** \brief Data Memory Barrier
  80. This function ensures the apparent order of the explicit memory operations before
  81. and after the instruction, without ensuring their completion.
  82. */
  83. __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
  84. {
  85. __ASM volatile ("dmb");
  86. }
  87. /** \brief Reverse byte order (32 bit)
  88. This function reverses the byte order in integer value.
  89. \param [in] value Value to reverse
  90. \return Reversed value
  91. */
  92. __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
  93. {
  94. uint32_t result;
  95. __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
  96. return(result);
  97. }
  98. /** \brief Reverse byte order (16 bit)
  99. This function reverses the byte order in two unsigned short values.
  100. \param [in] value Value to reverse
  101. \return Reversed value
  102. */
  103. __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
  104. {
  105. uint32_t result;
  106. __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  107. return(result);
  108. }
  109. /** \brief Reverse byte order in signed short value
  110. This function reverses the byte order in a signed short value with sign extension to integer.
  111. \param [in] value Value to reverse
  112. \return Reversed value
  113. */
  114. __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
  115. {
  116. uint32_t result;
  117. __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
  118. return(result);
  119. }
/** \brief  Rotate Right in unsigned value (32 bit)

    This function Rotate Right (immediate) provides the value of the contents
    of a register rotated by a variable number of bits.

    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of Bits to rotate
    \return             Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  /* "+r": op1 is both input and output of the ROR instruction. */
  __ASM volatile ("ror %0, %0, %1" : "+r" (op1) : "r" (op2) );
  return(op1);
}
  131. #if (__CORTEX_M >= 0x03)
/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
  143. /** \brief LDR Exclusive (8 bit)
  144. This function performs a exclusive LDR command for 8 bit value.
  145. \param [in] ptr Pointer to data
  146. \return value of type uint8_t at (*ptr)
  147. */
  148. __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
  149. {
  150. uint8_t result;
  151. __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
  152. return(result);
  153. }
  154. /** \brief LDR Exclusive (16 bit)
  155. This function performs a exclusive LDR command for 16 bit values.
  156. \param [in] ptr Pointer to data
  157. \return value of type uint16_t at (*ptr)
  158. */
  159. __attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
  160. {
  161. uint16_t result;
  162. __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
  163. return(result);
  164. }
/** \brief  LDR Exclusive (32 bit)

    This function performs a exclusive LDR command for 32 bit values.

    \param [in]    addr  Pointer to data
    \return              value of type uint32_t at (*addr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}
/** \brief  STR Exclusive (8 bit)

    This function performs a exclusive STR command for 8 bit values.

    \param [in]  value  Value to store
    \param [in]  addr   Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  /* "=&r" (early clobber) keeps the status register distinct from the
     address/value operands, as STREX requires. */
  __ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}
/** \brief  STR Exclusive (16 bit)

    This function performs a exclusive STR command for 16 bit values.

    \param [in]  value  Value to store
    \param [in]  addr   Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  /* "=&r" (early clobber) keeps the status register distinct from the
     address/value operands, as STREX requires. */
  __ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}
/** \brief  STR Exclusive (32 bit)

    This function performs a exclusive STR command for 32 bit values.

    \param [in]  value  Value to store
    \param [in]  addr   Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  /* "=&r" (early clobber) keeps the status register distinct from the
     address/value operands, as STREX requires. */
  __ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}
/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
}
/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  ARG1  Value to be saturated
    \param [in]  ARG2  Bit position to saturate to (1..32); must be a
                       compile-time constant (used as an "I" immediate)
    \return            Saturated value

    Implemented as a macro (GCC statement expression) because the saturate
    bit position has to be encoded as an immediate in the SSAT instruction.
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  ARG1  Value to be saturated
    \param [in]  ARG2  Bit position to saturate to (0..31); must be a
                       compile-time constant (used as an "I" immediate)
    \return            Saturated value

    Implemented as a macro (GCC statement expression) because the saturate
    bit position has to be encoded as an immediate in the USAT instruction.
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
  246. /** \brief Count leading zeros
  247. This function counts the number of leading zeros of a data value.
  248. \param [in] value Value to count the leading zeros
  249. \return number of leading zeros in value
  250. */
  251. __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
  252. {
  253. uint8_t result;
  254. __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  255. return(result);
  256. }
  257. #endif /* (__CORTEX_M >= 0x03) */
  258. #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
  259. /* TASKING carm specific functions */
  260. /*
  261. * The CMSIS functions have been implemented as intrinsics in the compiler.
  262. * Please use "carm -?i" to get an up to date list of all intrinsics,
  263. * Including the CMSIS ones.
  264. */
  265. #endif
  266. /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
  267. #endif /* __CORE_CMINSTR_H */