libopencm3
A free/libre/open-source firmware library for various ARM Cortex-M3 microcontrollers.
cortex.h
/** @defgroup CM3_cortex_defines Cortex Core Defines
 *
 * @brief <b>libopencm3 Defined Constants and Types for the Cortex Core</b>
 *
 * @ingroup CM3_defines
 *
 * @version 1.0.0
 *
 * LGPL License Terms @ref lgpl_license
 */
/*
 * This file is part of the libopencm3 project.
 *
 * Copyright (C) 2013 Ben Gamari <bgamari@gmail.com>
 * Copyright (C) 2013 Frantisek Burian <BuFran@seznam.cz>
 *
 * This library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library. If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef LIBOPENCM3_CORTEX_H
#define LIBOPENCM3_CORTEX_H

/**@{*/

#include <stdbool.h>
#include <stdint.h>

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Enable interrupts
 *
 * Disable the interrupt mask and enable interrupts globally
 */
static inline void cm_enable_interrupts(void)
{
	__asm__ volatile ("CPSIE I\n");
}

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Disable interrupts
 *
 * Mask all interrupts globally
 */
static inline void cm_disable_interrupts(void)
{
	__asm__ volatile ("CPSID I\n");
}
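
/*
 * Illustrative usage sketch (not part of the original header): a simple
 * critical section built from the enable/disable pair. Note that this
 * unconditionally re-enables interrupts; use cm_mask_interrupts() below if
 * the previous mask state must be preserved.
 *
 *	cm_disable_interrupts();
 *	shared_counter++;		// 'shared_counter' is a hypothetical shared variable
 *	cm_enable_interrupts();
 */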

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Enable faults
 *
 * Disable the HardFault mask and enable fault interrupt globally
 */
static inline void cm_enable_faults(void)
{
	__asm__ volatile ("CPSIE F\n");
}

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Disable faults
 *
 * Mask the HardFault interrupt globally
 */
static inline void cm_disable_faults(void)
{
	__asm__ volatile ("CPSID F\n");
}

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Check if interrupts are masked
 *
 * Checks if interrupts are masked (disabled).
 *
 * @returns true if interrupts are disabled.
 */
__attribute__((always_inline))
static inline bool cm_is_masked_interrupts(void)
{
	register uint32_t result;
	__asm__ volatile ("MRS %0, PRIMASK" : "=r" (result));
	return result;
}
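
/*
 * Illustrative sketch (not part of the original header): only re-enable
 * interrupts at the end of a critical section if the caller had not already
 * masked them.
 *
 *	bool was_masked = cm_is_masked_interrupts();
 *	cm_disable_interrupts();
 *	shared_counter++;		// hypothetical shared variable
 *	if (!was_masked) {
 *		cm_enable_interrupts();
 *	}
 */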

#if defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__)
/*---------------------------------------------------------------------------*/
/** @brief Cortex M Check if Fault interrupt is masked
 *
 * Checks if the HardFault interrupt is masked (disabled).
 *
 * @returns true if the HardFault interrupt is disabled.
 */
__attribute__((always_inline))
static inline bool cm_is_masked_faults(void)
{
	register uint32_t result;
	__asm__ volatile ("MRS %0, FAULTMASK" : "=r" (result));
	return result;
}
#endif

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Mask interrupts
 *
 * This function sets the interrupt mask (PRIMASK). If mask is true,
 * interrupts are disabled. The return value can be used to restore the
 * previous state of the mask.
 *
 * @param[in] mask uint32_t New state of the interrupt mask
 * @returns uint32_t old state of the interrupt mask
 */
__attribute__((always_inline))
static inline uint32_t cm_mask_interrupts(uint32_t mask)
{
	register uint32_t old;
	__asm__ __volatile__("MRS %0, PRIMASK" : "=r" (old));
	__asm__ __volatile__("" : : : "memory");
	__asm__ __volatile__("MSR PRIMASK, %0" : : "r" (mask));
	return old;
}
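
/*
 * Illustrative usage sketch (not part of the original header): save the
 * current interrupt mask, do the critical work, then restore whatever state
 * the caller had. This nests safely, unlike an unconditional enable/disable
 * pair.
 *
 *	uint32_t old = cm_mask_interrupts(true);	// disable interrupts, remember old state
 *	shared_counter++;				// hypothetical shared variable
 *	cm_mask_interrupts(old);			// restore the previous mask
 */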

#if defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__)
/*---------------------------------------------------------------------------*/
/** @brief Cortex M Mask HardFault interrupt
 *
 * This function sets the HardFault interrupt mask (FAULTMASK). If mask is
 * true, the HardFault interrupt is disabled. The return value can be used to
 * restore the previous state of the mask.
 *
 * @param[in] mask uint32_t New state of the HardFault interrupt mask
 * @returns uint32_t old state of the HardFault interrupt mask
 */
__attribute__((always_inline))
static inline uint32_t cm_mask_faults(uint32_t mask)
{
	register uint32_t old;
	__asm__ __volatile__ ("MRS %0, FAULTMASK" : "=r" (old));
	__asm__ __volatile__ ("" : : : "memory");
	__asm__ __volatile__ ("MSR FAULTMASK, %0" : : "r" (mask));
	return old;
}
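
/*
 * Illustrative usage sketch (not part of the original header): the same
 * save/restore pattern as cm_mask_interrupts(), but for FAULTMASK.
 *
 *	uint32_t old = cm_mask_faults(true);	// mask the HardFault exception
 *	critical_operation();			// hypothetical code executed with faults masked
 *	cm_mask_faults(old);			// restore the previous FAULTMASK state
 */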
#endif

/**@}*/

/*===========================================================================*/
/** @defgroup CM3_cortex_atomic_defines Cortex Core Atomic support Defines
 *
 * @brief Atomic operation support
 *
 * @ingroup CM3_cortex_defines
 */
/**@{*/

#if !defined(__DOXYGEN__)
/* Internal helper; not part of the documented public API. */
static inline uint32_t __cm_atomic_set(uint32_t *val)
{
	return cm_mask_interrupts(*val);
}

#define __CM_SAVER(state) \
	__val = (state), \
	__save __attribute__((__cleanup__(__cm_atomic_set))) = \
	__cm_atomic_set(&__val)

#endif /* !defined(__DOXYGEN__) */
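
/*
 * Explanatory note (not from the original source): __CM_SAVER relies on the
 * GCC/Clang __cleanup__ variable attribute. It declares two variables in the
 * enclosing scope: __val, holding the requested mask state, and __save, which
 * captures the previous PRIMASK value returned by __cm_atomic_set(). When
 * __save goes out of scope, the compiler calls __cm_atomic_set(&__save),
 * writing the saved value back to PRIMASK and so restoring the original
 * interrupt state on any exit path.
 */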


/*---------------------------------------------------------------------------*/
/** @brief Cortex M Atomic Declare block
 *
 * This macro disables interrupts for the next statement or block of code. The
 * interrupt mask is automatically restored when execution leaves the block,
 * so interrupts are restored even if the block is left through a return or
 * goto statement.
 *
 * @warning Do not use break or continue inside the block: because of the way
 * this macro is implemented, they would only leave the block itself, not the
 * surrounding loop or switch.
 *
 * @note It is safe to use this block in normal code and in interrupt
 * routines.
 *
 * Basic usage of the atomic block
 *
 * @code
 * uint64_t value;     // This value is used somewhere in an interrupt
 *
 * ...
 *
 * CM_ATOMIC_BLOCK() {             // interrupts are masked in this block
 *     value = value * 1024 + 651; // access value atomically
 * }                               // interrupts are restored automatically
 * @endcode
 *
 * Use of return inside the block
 *
 * @code
 * uint64_t value;     // This value is used somewhere in an interrupt
 *
 * ...
 *
 * uint64_t allocval(void)
 * {
 *     CM_ATOMIC_BLOCK() {             // interrupts are masked in this block
 *         value = value * 1024 + 651; // do a long atomic operation
 *         return value;               // interrupts are restored automatically
 *     }
 * }
 * @endcode
 */
#if defined(__DOXYGEN__)
#define CM_ATOMIC_BLOCK()
#else /* defined(__DOXYGEN__) */
#define CM_ATOMIC_BLOCK() \
	for (uint32_t __CM_SAVER(true), __my = true; __my; __my = false)
#endif /* defined(__DOXYGEN__) */
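
/*
 * Explanatory note (not from the original source): CM_ATOMIC_BLOCK() expands
 * to a for statement of the form
 *
 *	for (uint32_t __val = (true),
 *		__save __attribute__((__cleanup__(__cm_atomic_set))) = __cm_atomic_set(&__val),
 *		__my = true;
 *		__my; __my = false)
 *
 * so the statement or block that follows runs exactly once with PRIMASK set,
 * and the __cleanup__ handler restores the saved PRIMASK value as soon as
 * control leaves that body, however it leaves.
 */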

/*---------------------------------------------------------------------------*/
/** @brief Cortex M Atomic Declare context
 *
 * This macro disables interrupts in the current block of code from the place
 * where it is used to the end of the block. The interrupt mask is
 * automatically restored when execution leaves the block, so interrupts are
 * restored even if the block is left through a return, continue, break, or
 * goto statement.
 *
 * @note This macro is intended for use in for loops, where it allows break
 * and continue statements inside the block, and for writing atomic
 * reader-like functions.
 *
 * @note It is safe to use this macro in normal code and in interrupt
 * routines.
 *
 * Basic usage of the atomic context
 *
 * @code
 * uint64_t value;     // This value is used somewhere in an interrupt
 *
 * ...
 *
 * for (int i = 0; i < 100; i++) {
 *     CM_ATOMIC_CONTEXT();    // interrupts are masked in this block
 *     value += 100;           // access value atomically
 *     if ((value % 16) == 0) {
 *         break;              // restore interrupts and break the loop
 *     }
 * }                           // interrupts are restored automatically
 * @endcode
 *
 * Usage of the atomic context inside an atomic reader function
 *
 * @code
 * uint64_t value;     // This value is used somewhere in an interrupt
 *
 * ...
 *
 * uint64_t getnextval(void)
 * {
 *     CM_ATOMIC_CONTEXT();    // interrupts are masked in this block
 *     value = value + 3;      // do a long atomic operation
 *     return value;           // interrupts are restored automatically
 * }
 * @endcode
 */
#if defined(__DOXYGEN__)
#define CM_ATOMIC_CONTEXT()
#else /* defined(__DOXYGEN__) */
#define CM_ATOMIC_CONTEXT() uint32_t __CM_SAVER(true)
#endif /* defined(__DOXYGEN__) */

/**@}*/

#endif