/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <atomic>
#include <stdarg.h>
#include <stdlib.h>
#include <string.h>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX 1
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD)
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

#if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
#define KMP_HAVE_SCHED_GETCPU 1
#else
#define KMP_HAVE_SCHED_GETCPU 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

// stdarg handling
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64) && \
    (KMP_OS_FREEBSD || KMP_OS_LINUX)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif

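// Illustrative sketch (not part of the upstream header): the kmp_va_list
// wrappers hide whether a va_list is forwarded directly or by address
// (on the ABIs listed above the runtime passes a va_list *). On such a
// target a hypothetical forwarder could expose the caller's list as:
//
//   void forward_varargs(int n, ...) { // hypothetical helper
//     va_list ap;
//     va_start(ap, n);
//     kmp_va_list kap = kmp_va_addr_of(ap);       // here: &ap
//     int first = va_arg(kmp_va_deref(kap), int); // reads from ap
//     (void)first;
//     va_end(ap);
//   }
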
#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

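// Illustrative sketch (not part of the upstream header): traits_t lets
// templated code pick the right printf specifier for a type without
// hard-coding it. A hypothetical debug helper might be:
//
//   template <typename T> void debug_print(char const *name, T v) {
//     char fmt[32];
//     snprintf(fmt, sizeof(fmt), "%%s = %%%s\n", traits_t<T>::spec);
//     printf(fmt, name, v); // e.g. "%s = %lld\n" for long long
//   }
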
#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

/* Check if the OS/arch can support user-level mwait */
// All mwait code tests for UMWAIT first, so it should only fall back to ring3
// MWAIT for KNL.
#define KMP_HAVE_MWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC)

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))

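// Illustrative usage (not part of the upstream header): PAGE_ALIGNED tests
// whether an address sits exactly on a page boundary, and ALIGN_TO_PAGE
// rounds an address down to the start of its page. Assuming a 4096-byte
// page size:
//
//   void *q = ALIGN_TO_PAGE((void *)0x12345); // -> (void *)0x12000
//   int aligned = PAGE_ALIGNED(q);            // -> 1 (true)
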
/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
// Code from libcxx/include/__config
// Use a function like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif

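// Illustrative usage (not part of the upstream header): KMP_FALLTHROUGH()
// goes at the end of a case body that deliberately falls into the next
// label, silencing -Wimplicit-fallthrough where the attribute exists:
//
//   switch (state) {
//   case 0:
//     step_zero();       // hypothetical
//     KMP_FALLTHROUGH(); // intentional fall through into case 1
//   case 1:
//     step_one();        // hypothetical
//     break;
//   }
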
#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
#endif

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str) \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, \
                                 default_ver) \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name)))); \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR( \
      apic_name) "@@" default_ver "\n\t"); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t")

#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS

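// Illustrative expansion (not part of the upstream header): for an entry
// point such as omp_get_num_threads, the runtime defines the function
// under the __kmp_api_ prefix and then publishes versioned aliases:
//
//   int KMP_EXPAND_NAME(omp_get_num_threads)(void) { /* ... */ }
//   KMP_VERSION_SYMBOL(omp_get_num_threads, 10, "OMP_1.0");
//
// With KMP_USE_VERSION_SYMBOLS defined, this emits .symver directives
// binding omp_get_num_threads@OMP_1.0 to a *_10_alias and
// omp_get_num_threads@@VERSION (the default) to the real definition.
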
/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#if !(KMP_COMPILER_ICX && KMP_32_BIT_ARCH)
#pragma intrinsic(InterlockedExchange64)
#endif
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
// #define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
// #define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
// #define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
// #define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
/*
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
*/
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))

// KMP_COMPARE_AND_STORE expects this order: pointer, compare, exchange
// _InterlockedCompareExchange expects this order: pointer, exchange, compare
// KMP_COMPARE_AND_STORE also returns a bool indicating a successful write. A
// write is successful if the return value of _InterlockedCompareExchange is
// the same as the compare value.
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}

// The _RET versions return the value instead of a bool
/*
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  _InterlockedCompareExchange8((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  _InterlockedCompareExchange16((p), (sv), (cv))
*/
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
                                (kmp_int64)(cv))

/*
#define KMP_XCHG_FIXED8(p, v) \
  _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
*/
// #define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
// #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

// inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
//   kmp_int64 tmp =
//       _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64 *)&v);
//   return *(kmp_real64 *)&tmp;
// }

#else // !KMP_ARCH_AARCH64

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#endif

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v) \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v) \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v) \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline kmp_uint64 mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                        kmp_uint64 cv,
                                                        kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#endif

#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8(p, v) \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v), \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  volatile kmp_uint32 *up;
  kmp_uint32 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int32 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real32 ftmp;
  memcpy(&ftmp, &tmp, sizeof(tmp));
  return ftmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  volatile kmp_uint64 *up;
  kmp_uint64 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real64 dtmp;
  memcpy(&dtmp, &tmp, sizeof(tmp));
  return dtmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else /* !KMP_OS_WINDOWS */
#define KMP_MB() __sync_synchronize()
#endif
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE() \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) { \
    __kmp_query_cpuid(&__kmp_cpuinfo); \
  } \
  if (__kmp_cpuinfo.flags.sse2) { \
    KMP_MFENCE_(); \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#else
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

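// Illustrative usage (not part of the upstream header): TCR_*/TCW_* mark
// "thread-consistent" reads and writes of shared state; the _PTR flavors
// pick the 4- or 8-byte form that matches the pointer width. Given a
// hypothetical shared field:
//
//   void *p = TCR_PTR(shared->entry);    // consistent read
//   if (p == NULL)
//     TCW_PTR(shared->entry, new_entry); // consistent write
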
/* If these FTN_{TRUE,FALSE} values change, we may need to change several
   places where they are used to check whether the language is Fortran,
   not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

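// Illustrative sketch (not part of the upstream header): a microtask is
// the compiler-outlined body of a parallel region. It receives pointers
// to two runtime-supplied ints (the global thread id first), followed by
// pointers to the region's shared variables:
//
//   void outlined_body(int *gtid, int *npr, int *shared_x) { // hypothetical
//     /* ... body of the parallel region, using *shared_x ... */
//   }
//   microtask_t mt = (microtask_t)outlined_body;
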
#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Safe C API
#include "kmp_safe_c_api.h"

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)

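// Illustrative usage (not part of the upstream header): the wrappers keep
// the memory order visible at each call site. For a counter declared as
// std::atomic<kmp_int32>:
//
//   std::atomic<kmp_int32> count{0};
//   kmp_int32 old = KMP_ATOMIC_INC(&count);    // fetch_add(1, acq_rel)
//   KMP_ATOMIC_ST_REL(&count, 0);              // store(0, release)
//   kmp_int32 cur = KMP_ATOMIC_LD_ACQ(&count); // load(acquire)
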
// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}

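// Illustrative usage (not part of the upstream header): unlike a raw
// compare_exchange_strong, these helpers take "expected" by value, so a
// failed exchange does not write the observed value back to the caller:
//
//   std::atomic<kmp_int32> flag{0};
//   if (__kmp_atomic_compare_store(&flag, 0, 1)) {
//     /* this thread performed the 0 -> 1 transition */
//   }
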
// Symbol lookup on Linux/Windows
#if KMP_OS_WINDOWS
extern void *__kmp_lookup_symbol(const char *name);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif

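// Illustrative usage (not part of the upstream header): KMP_DLSYM resolves
// a symbol by name at runtime, while KMP_DLSYM_NEXT (Unix only; nullptr on
// Windows) skips the current module, the usual pattern when wrapping a
// function defined elsewhere:
//
//   typedef void *(*malloc_t)(size_t); // hypothetical wrapper scenario
//   malloc_t real_malloc = (malloc_t)KMP_DLSYM_NEXT("malloc");
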
#endif /* KMP_OS_H */