Outline
#include "freertos/FreeRTOS.h"
#include "soc/soc_caps.h"
#include "sdkconfig.h"
#include "xtensa/config/core-isa.h"
#define CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND
#define HAS_ATOMICS_32
#define HAS_ATOMICS_64
#define __riscv_atomic
#define HAS_ATOMICS_32
#define HAS_ATOMICS_64
#define _ATOMIC_ENTER_CRITICAL
#define _ATOMIC_EXIT_CRITICAL
#define _ATOMIC_ENTER_CRITICAL
#define _ATOMIC_EXIT_CRITICAL
#define _ATOMIC_ENTER_CRITICAL
#define _ATOMIC_EXIT_CRITICAL
#define _ATOMIC_HW_STUB_OP_FUNCTION
#define _ATOMIC_HW_STUB_EXCHANGE
#define _ATOMIC_HW_STUB_STORE
#define _ATOMIC_HW_STUB_CMP_EXCHANGE
#define _ATOMIC_HW_STUB_LOAD
#define _ATOMIC_HW_STUB_SYNC_BOOL_CMP_EXCHANGE
#define _ATOMIC_HW_STUB_SYNC_VAL_CMP_EXCHANGE
#define _ATOMIC_HW_STUB_SYNC_LOCK_TEST_AND_SET
#define _ATOMIC_HW_STUB_SYNC_LOCK_RELEASE
#define CLANG_ATOMIC_SUFFIX
#define CLANG_ATOMIC_SUFFIX
#define CLANG_DECLARE_ALIAS
ESP-IDF: components/newlib/priv_include/esp_stdatomic.h
 
/*
 * SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/* ... */

#include "freertos/FreeRTOS.h"
#include "soc/soc_caps.h"
#include "sdkconfig.h"

#ifdef __XTENSA__

#include "xtensa/config/core-isa.h"

#ifndef XCHAL_HAVE_S32C1I
#error "XCHAL_HAVE_S32C1I not defined, include correct header!"
#endif // XCHAL_HAVE_S32C1I

#ifndef CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND
#define CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND 0
#endif

#define HAS_ATOMICS_32 ((XCHAL_HAVE_S32C1I == 1) && !CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND)
// no 64-bit atomics on Xtensa
#define HAS_ATOMICS_64 0

/* ... */

#else // RISCV

// GCC toolchain will define this pre-processor if "A" extension is supported
#ifndef __riscv_atomic
#define __riscv_atomic 0
#endif

#define HAS_ATOMICS_32 (__riscv_atomic == 1)
#define HAS_ATOMICS_64 ((__riscv_atomic == 1) && (__riscv_xlen == 64))

/* ... */

#endif // (__XTENSA__, __riscv)

#if SOC_CPU_CORES_NUM == 1

// Single core SoC: atomics can be implemented using portSET_INTERRUPT_MASK_FROM_ISR
// and portCLEAR_INTERRUPT_MASK_FROM_ISR, which disables and enables interrupts.
#if CONFIG_FREERTOS_SMP
#define _ATOMIC_ENTER_CRITICAL() unsigned int state = portDISABLE_INTERRUPTS();
#define _ATOMIC_EXIT_CRITICAL()  portRESTORE_INTERRUPTS(state)
/* ... */
#else // CONFIG_FREERTOS_SMP
#define _ATOMIC_ENTER_CRITICAL() unsigned int state = portSET_INTERRUPT_MASK_FROM_ISR()
#define _ATOMIC_EXIT_CRITICAL()  portCLEAR_INTERRUPT_MASK_FROM_ISR(state)
/* ... */
#endif // CONFIG_FREERTOS_SMP

/* ... */

#else // SOC_CPU_CORES_NUM

#define _ATOMIC_ENTER_CRITICAL() portENTER_CRITICAL_SAFE(&s_atomic_lock);
#define _ATOMIC_EXIT_CRITICAL()  portEXIT_CRITICAL_SAFE(&s_atomic_lock);

/* ... */

#endif // SOC_CPU_CORES_NUM

#if CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND

#define _ATOMIC_IF_NOT_EXT_RAM() \
    if (!((uintptr_t)ptr >= SOC_EXTRAM_DATA_LOW && (uintptr_t) ptr < SOC_EXTRAM_DATA_HIGH))

#define _ATOMIC_HW_STUB_OP_FUNCTION(n, type, name_1, name_2) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        type __atomic_s32c1i_ ##name_1 ##_ ##name_2 ##_ ##n (volatile void* ptr, type value, int memorder); \
        return __atomic_s32c1i_ ##name_1 ##_ ##name_2 ##_ ##n (ptr, value, memorder); \
    }

#define _ATOMIC_HW_STUB_EXCHANGE(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        type __atomic_s32c1i_exchange_ ## n (volatile void* ptr, type value, int memorder); \
        return __atomic_s32c1i_exchange_ ## n (ptr, value, memorder); \
    }

#define _ATOMIC_HW_STUB_STORE(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        void __atomic_s32c1i_store_ ## n (volatile void * ptr, type value, int memorder); \
        __atomic_s32c1i_store_ ## n (ptr, value, memorder); \
        return; \
    }

#define _ATOMIC_HW_STUB_CMP_EXCHANGE(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        bool __atomic_s32c1i_compare_exchange_ ## n (volatile void* ptr, void* expected, type desired, bool weak, int success, int failure); \
        return __atomic_s32c1i_compare_exchange_ ## n (ptr, expected, desired, weak, success, failure); \
    }

#define _ATOMIC_HW_STUB_LOAD(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        type __atomic_s32c1i_load_ ## n (const volatile void* ptr, int memorder); \
        return __atomic_s32c1i_load_ ## n (ptr, memorder); \
    }

#define _ATOMIC_HW_STUB_SYNC_BOOL_CMP_EXCHANGE(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        bool __sync_s32c1i_bool_compare_and_swap_ ## n (volatile void* ptr, type expected, type desired); \
        return __sync_s32c1i_bool_compare_and_swap_ ## n (ptr, expected, desired); \
    }
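/*
 * Illustrative expansion (not part of the original header): each of the
 * _ATOMIC_HW_STUB_* helpers above expands into a guard that routes the
 * operation to the S32C1I-backed helper whenever the operand does not live
 * in external RAM, and otherwise falls through to the critical-section
 * fallback defined further down. For example, after also expanding
 * _ATOMIC_IF_NOT_EXT_RAM(), _ATOMIC_HW_STUB_EXCHANGE(4, unsigned int) becomes:
 *
 *   if (!((uintptr_t)ptr >= SOC_EXTRAM_DATA_LOW && (uintptr_t) ptr < SOC_EXTRAM_DATA_HIGH)) {
 *       unsigned int __atomic_s32c1i_exchange_4 (volatile void* ptr, unsigned int value, int memorder);
 *       return __atomic_s32c1i_exchange_4 (ptr, value, memorder);
 *   }
 */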
#define _ATOMIC_HW_STUB_SYNC_VAL_CMP_EXCHANGE(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        type __sync_s32c1i_val_compare_and_swap_ ## n (volatile void* ptr, type expected, type desired); \
        return __sync_s32c1i_val_compare_and_swap_ ## n (ptr, expected, desired); \
    }

#define _ATOMIC_HW_STUB_SYNC_LOCK_TEST_AND_SET(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        type __sync_s32c1i_lock_test_and_set_ ## n (volatile void* ptr, type value); \
        return __sync_s32c1i_lock_test_and_set_ ## n (ptr, value); \
    }

#define _ATOMIC_HW_STUB_SYNC_LOCK_RELEASE(n, type) \
    _ATOMIC_IF_NOT_EXT_RAM() { \
        void __sync_s32c1i_lock_release_ ## n (volatile void* ptr); \
        __sync_s32c1i_lock_release_ ## n (ptr); \
        return; \
    }

#else // CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND

#define _ATOMIC_HW_STUB_OP_FUNCTION(n, type, name_1, name_2)
#define _ATOMIC_HW_STUB_EXCHANGE(n, type)
#define _ATOMIC_HW_STUB_STORE(n, type)
#define _ATOMIC_HW_STUB_CMP_EXCHANGE(n, type)
#define _ATOMIC_HW_STUB_LOAD(n, type)
#define _ATOMIC_HW_STUB_SYNC_BOOL_CMP_EXCHANGE(n, type)
#define _ATOMIC_HW_STUB_SYNC_VAL_CMP_EXCHANGE(n, type)
#define _ATOMIC_HW_STUB_SYNC_LOCK_TEST_AND_SET(n, type)
#define _ATOMIC_HW_STUB_SYNC_LOCK_RELEASE(n, type)

/* ... */

#endif // CONFIG_STDATOMIC_S32C1I_SPIRAM_WORKAROUND

#ifdef __clang__
// Clang doesn't allow to define "__sync_*" atomics. The workaround is to define function with name "__sync_*_builtin",
// which implements "__sync_*" atomic functionality and use asm directive to set the value of symbol "__sync_*" to the name
// of defined function.
#define CLANG_ATOMIC_SUFFIX(name_) name_ ## _builtin
#define CLANG_DECLARE_ALIAS(name_) \
    __asm__(".type " # name_ ", @function\n" \
            ".global " #name_ "\n" \
            ".equ " #name_ ", " #name_ "_builtin");

/* ... */

#else // __clang__
#define CLANG_ATOMIC_SUFFIX(name_) name_
#define CLANG_DECLARE_ALIAS(name_)
/* ... */
#endif // __clang__

#define ATOMIC_OP_FUNCTIONS(n, type, name, operation, inverse) \
    _ATOMIC_OP_FUNCTION(n, type, fetch, name, old, operation, inverse) \
    _ATOMIC_OP_FUNCTION(n, type, name, fetch, new, operation, inverse)

#define _ATOMIC_OP_FUNCTION(n, type, name_1, name_2, ret_var, operation, inverse) \
type __atomic_ ##name_1 ##_ ##name_2 ##_ ##n (volatile void* ptr, type value, int memorder) \
{ \
    type old, new; \
    _ATOMIC_HW_STUB_OP_FUNCTION(n, type, name_1, name_2); \
    _ATOMIC_ENTER_CRITICAL(); \
    old = (*(volatile type*)ptr); \
    new = inverse(old operation value); \
    *(volatile type*)ptr = new; \
    _ATOMIC_EXIT_CRITICAL(); \
    return ret_var; \
}

#define ATOMIC_LOAD(n, type) \
type __atomic_load_ ## n (const volatile void* ptr, int memorder) \
{ \
    type old; \
    _ATOMIC_HW_STUB_LOAD(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    old = *(const volatile type*)ptr; \
    _ATOMIC_EXIT_CRITICAL(); \
    return old; \
}

#define ATOMIC_CMP_EXCHANGE(n, type) \
bool __atomic_compare_exchange_ ## n (volatile void* ptr, void* expected, type desired, bool weak, int success, int failure) \
{ \
    bool ret = false; \
    _ATOMIC_HW_STUB_CMP_EXCHANGE(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    if (*(volatile type*)ptr == *(type*)expected) { \
        ret = true; \
        *(volatile type*)ptr = desired; \
    } else { \
        *(type*)expected = *(volatile type*)ptr; \
    } \
    _ATOMIC_EXIT_CRITICAL(); \
    return ret; \
}

#define ATOMIC_STORE(n, type) \
void __atomic_store_ ## n (volatile void * ptr, type value, int memorder) \
{ \
    _ATOMIC_HW_STUB_STORE(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    *(volatile type*)ptr = value; \
    _ATOMIC_EXIT_CRITICAL(); \
}
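/*
 * Illustrative expansion (not part of the original header): one step of
 * expanding _ATOMIC_OP_FUNCTION(4, unsigned int, fetch, add, old, +, ) shows
 * how the fetch-and-op / op-and-fetch pairs are generated. Returning `old`
 * yields __atomic_fetch_add_4, while the sibling expansion that returns `new`
 * yields __atomic_add_fetch_4:
 *
 *   unsigned int __atomic_fetch_add_4 (volatile void* ptr, unsigned int value, int memorder)
 *   {
 *       unsigned int old, new;
 *       _ATOMIC_HW_STUB_OP_FUNCTION(4, unsigned int, fetch, add);
 *       _ATOMIC_ENTER_CRITICAL();
 *       old = (*(volatile unsigned int*)ptr);
 *       new = (old + value);
 *       *(volatile unsigned int*)ptr = new;
 *       _ATOMIC_EXIT_CRITICAL();
 *       return old;
 *   }
 */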
#define ATOMIC_EXCHANGE(n, type) \
type __atomic_exchange_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    type old; \
    _ATOMIC_HW_STUB_EXCHANGE(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    old = *(volatile type*)ptr; \
    *(volatile type*)ptr = value; \
    _ATOMIC_EXIT_CRITICAL(); \
    return old; \
}

#define SYNC_OP_FUNCTIONS(n, type, name) \
    _SYNC_OP_FUNCTION(n, type, fetch, name) \
    _SYNC_OP_FUNCTION(n, type, name, fetch)

#define _SYNC_OP_FUNCTION(n, type, name_1, name_2) \
type CLANG_ATOMIC_SUFFIX(__sync_ ##name_1 ##_and_ ##name_2 ##_ ##n) (volatile void* ptr, type value) \
{ \
    return __atomic_ ##name_1 ##_ ##name_2 ##_ ##n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_##name_1 ##_and_ ##name_2 ##_ ##n )

#define SYNC_BOOL_CMP_EXCHANGE(n, type) \
bool CLANG_ATOMIC_SUFFIX(__sync_bool_compare_and_swap_ ## n) (volatile void* ptr, type expected, type desired) \
{ \
    bool ret = false; \
    _ATOMIC_HW_STUB_SYNC_BOOL_CMP_EXCHANGE(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    if (*(volatile type*)ptr == expected) { \
        *(volatile type*)ptr = desired; \
        ret = true; \
    } \
    _ATOMIC_EXIT_CRITICAL(); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_bool_compare_and_swap_ ## n )

#define SYNC_VAL_CMP_EXCHANGE(n, type) \
type CLANG_ATOMIC_SUFFIX(__sync_val_compare_and_swap_ ## n) (volatile void* ptr, type expected, type desired) \
{ \
    type old; \
    _ATOMIC_HW_STUB_SYNC_VAL_CMP_EXCHANGE(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    old = *(volatile type*)ptr; \
    if (old == expected) { \
        *(volatile type*)ptr = desired; \
    } \
    _ATOMIC_EXIT_CRITICAL(); \
    return old; \
} \
CLANG_DECLARE_ALIAS( __sync_val_compare_and_swap_ ## n )

#define SYNC_LOCK_TEST_AND_SET(n, type) \
type CLANG_ATOMIC_SUFFIX(__sync_lock_test_and_set_ ## n) (volatile void* ptr, type value) \
{ \
    type old; \
    _ATOMIC_HW_STUB_SYNC_LOCK_TEST_AND_SET(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    old = *(volatile type*)ptr; \
    *(volatile type*)ptr = value; \
    _ATOMIC_EXIT_CRITICAL(); \
    return old; \
} \
CLANG_DECLARE_ALIAS( __sync_lock_test_and_set_ ## n )

#define SYNC_LOCK_RELEASE(n, type) \
void CLANG_ATOMIC_SUFFIX(__sync_lock_release_ ## n) (volatile void* ptr) \
{ \
    _ATOMIC_HW_STUB_SYNC_LOCK_RELEASE(n, type); \
    _ATOMIC_ENTER_CRITICAL(); \
    *(volatile type*)ptr = 0; \
    _ATOMIC_EXIT_CRITICAL(); \
} \
CLANG_DECLARE_ALIAS( __sync_lock_release_ ## n )

#define ATOMIC_FUNCTIONS(n, type) \
    ATOMIC_EXCHANGE(n, type) \
    ATOMIC_CMP_EXCHANGE(n, type) \
    ATOMIC_OP_FUNCTIONS(n, type, add, +, ) \
    ATOMIC_OP_FUNCTIONS(n, type, sub, -, ) \
    ATOMIC_OP_FUNCTIONS(n, type, and, &, ) \
    ATOMIC_OP_FUNCTIONS(n, type, or, |, ) \
    ATOMIC_OP_FUNCTIONS(n, type, xor, ^, ) \
    ATOMIC_OP_FUNCTIONS(n, type, nand, &, ~) \
    /* LLVM has not implemented native atomic load/stores for riscv targets without the Atomic extension. LLVM thread: https://reviews.llvm.org/D47553. \
     * Even though GCC does transform them, these libcalls need to be available for the case where a LLVM based project links against IDF. */ \
    ATOMIC_LOAD(n, type) \
    ATOMIC_STORE(n, type) \
    SYNC_OP_FUNCTIONS(n, type, add) \
    SYNC_OP_FUNCTIONS(n, type, sub) \
    SYNC_OP_FUNCTIONS(n, type, and) \
    SYNC_OP_FUNCTIONS(n, type, or) \
    SYNC_OP_FUNCTIONS(n, type, xor) \
    SYNC_OP_FUNCTIONS(n, type, nand) \
    SYNC_BOOL_CMP_EXCHANGE(n, type) \
    SYNC_VAL_CMP_EXCHANGE(n, type) \
    SYNC_LOCK_TEST_AND_SET(n, type) \
    SYNC_LOCK_RELEASE(n, type)
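The macros above only generate code when instantiated; a companion source file is expected to expand ATOMIC_FUNCTIONS once per operand width for which the target lacks native atomics. A minimal sketch of such an instantiation, using hypothetical width/type pairs and relying on the HAS_ATOMICS_32/HAS_ATOMICS_64 feature macros defined above (the actual instantiation in ESP-IDF's stdatomic.c may differ):

/* Hypothetical instantiation sketch -- not taken from the original file. */
#include "esp_stdatomic.h"

#if !HAS_ATOMICS_32
ATOMIC_FUNCTIONS(1, unsigned char)          /* emits the __atomic_*_1 and __sync_*_1 libcalls */
ATOMIC_FUNCTIONS(2, short unsigned int)     /* emits the __atomic_*_2 and __sync_*_2 libcalls */
ATOMIC_FUNCTIONS(4, unsigned int)           /* emits the __atomic_*_4 and __sync_*_4 libcalls */
#endif

#if !HAS_ATOMICS_64
ATOMIC_FUNCTIONS(8, long long unsigned int) /* emits the __atomic_*_8 and __sync_*_8 libcalls */
#endif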