#include "kmp_atomic.h"

typedef unsigned char uchar;
typedef unsigned short ushort;
#ifndef KMP_GOMP_COMPAT
int __kmp_atomic_mode = 1; // Intel performance mode
#else
int __kmp_atomic_mode = 2; // GOMP compatibility mode
#endif /* KMP_GOMP_COMPAT */
// One lock per operand size/type; used when an operation cannot be done with
// a single atomic instruction and must fall back to a critical section.
kmp_atomic_lock_t __kmp_atomic_lock; // all user coded atomics (GOMP compat)
kmp_atomic_lock_t __kmp_atomic_lock_1i; // 1-byte fixed point operands
kmp_atomic_lock_t __kmp_atomic_lock_2i; // 2-byte fixed point operands
kmp_atomic_lock_t __kmp_atomic_lock_4i; // 4-byte fixed point operands
kmp_atomic_lock_t __kmp_atomic_lock_4r; // 4-byte floating point operands
kmp_atomic_lock_t __kmp_atomic_lock_8i; // 8-byte fixed point operands
kmp_atomic_lock_t __kmp_atomic_lock_8r; // 8-byte floating point operands
kmp_atomic_lock_t __kmp_atomic_lock_8c; // 8-byte complex operands
kmp_atomic_lock_t __kmp_atomic_lock_10r; // 10-byte floating point operands
kmp_atomic_lock_t __kmp_atomic_lock_16r; // 16-byte floating point operands
kmp_atomic_lock_t __kmp_atomic_lock_16c; // 16-byte complex operands
kmp_atomic_lock_t __kmp_atomic_lock_20c; // 20-byte complex operands
kmp_atomic_lock_t __kmp_atomic_lock_32c; // 32-byte complex operands
#define KMP_ATOMIC_VOLATILE volatile

#if (KMP_ARCH_X86) && KMP_HAVE_QUAD

static inline Quad_a4_t operator+(Quad_a4_t &lhs, Quad_a4_t &rhs) {
  return lhs.q + rhs.q;
}
static inline Quad_a4_t operator-(Quad_a4_t &lhs, Quad_a4_t &rhs) {
  return lhs.q - rhs.q;
}
static inline Quad_a4_t operator*(Quad_a4_t &lhs, Quad_a4_t &rhs) {
  return lhs.q * rhs.q;
}
static inline Quad_a4_t operator/(Quad_a4_t &lhs, Quad_a4_t &rhs) {
  return lhs.q / rhs.q;
}
static inline bool operator<(Quad_a4_t &lhs, Quad_a4_t &rhs) {
  return lhs.q < rhs.q;
}
static inline bool operator>(Quad_a4_t &lhs, Quad_a4_t &rhs) {
  return lhs.q > rhs.q;
}

static inline Quad_a16_t operator+(Quad_a16_t &lhs, Quad_a16_t &rhs) {
  return lhs.q + rhs.q;
}
static inline Quad_a16_t operator-(Quad_a16_t &lhs, Quad_a16_t &rhs) {
  return lhs.q - rhs.q;
}
static inline Quad_a16_t operator*(Quad_a16_t &lhs, Quad_a16_t &rhs) {
  return lhs.q * rhs.q;
}
static inline Quad_a16_t operator/(Quad_a16_t &lhs, Quad_a16_t &rhs) {
  return lhs.q / rhs.q;
}
static inline bool operator<(Quad_a16_t &lhs, Quad_a16_t &rhs) {
  return lhs.q < rhs.q;
}
static inline bool operator>(Quad_a16_t &lhs, Quad_a16_t &rhs) {
  return lhs.q > rhs.q;
}

static inline kmp_cmplx128_a4_t operator+(kmp_cmplx128_a4_t &lhs,
                                          kmp_cmplx128_a4_t &rhs) {
  return lhs.q + rhs.q;
}
static inline kmp_cmplx128_a4_t operator-(kmp_cmplx128_a4_t &lhs,
                                          kmp_cmplx128_a4_t &rhs) {
  return lhs.q - rhs.q;
}
static inline kmp_cmplx128_a4_t operator*(kmp_cmplx128_a4_t &lhs,
                                          kmp_cmplx128_a4_t &rhs) {
  return lhs.q * rhs.q;
}
static inline kmp_cmplx128_a4_t operator/(kmp_cmplx128_a4_t &lhs,
                                          kmp_cmplx128_a4_t &rhs) {
  return lhs.q / rhs.q;
}

static inline kmp_cmplx128_a16_t operator+(kmp_cmplx128_a16_t &lhs,
                                           kmp_cmplx128_a16_t &rhs) {
  return lhs.q + rhs.q;
}
static inline kmp_cmplx128_a16_t operator-(kmp_cmplx128_a16_t &lhs,
                                           kmp_cmplx128_a16_t &rhs) {
  return lhs.q - rhs.q;
}
static inline kmp_cmplx128_a16_t operator*(kmp_cmplx128_a16_t &lhs,
                                           kmp_cmplx128_a16_t &rhs) {
  return lhs.q * rhs.q;
}
static inline kmp_cmplx128_a16_t operator/(kmp_cmplx128_a16_t &lhs,
                                           kmp_cmplx128_a16_t &rhs) {
  return lhs.q / rhs.q;
}

#endif // (KMP_ARCH_X86) && KMP_HAVE_QUAD
#define KMP_CHECK_GTID                                                         \
  if (gtid == KMP_GTID_UNKNOWN) {                                              \
    gtid = __kmp_entry_gtid();                                                 \
  } // check and get gtid when needed

// Beginning of a routine definition: name, parameters, debug trace.
#define ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, RET_TYPE)                           \
  RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid,        \
                                             TYPE *lhs, TYPE rhs) {            \
    KMP_DEBUG_ASSERT(__kmp_init_serial);                                       \
    KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
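// Illustrative expansion (not part of the build): with the macro above,
// ATOMIC_BEGIN(fixed4, add, kmp_int32, void) opens a routine of the form
//
//   void __kmpc_atomic_fixed4_add(ident_t *id_ref, int gtid,
//                                 kmp_int32 *lhs, kmp_int32 rhs) {
//     KMP_DEBUG_ASSERT(__kmp_init_serial);
//     KA_TRACE(100, ("__kmpc_atomic_fixed4_add: T#%d\n", gtid));
//
// The body and the closing brace come from the operation macros that follow,
// which is why ATOMIC_BEGIN itself does not emit a "}".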
#define ATOMIC_LOCK0 __kmp_atomic_lock // all types, for Gnu compat
#define ATOMIC_LOCK1i __kmp_atomic_lock_1i // char
#define ATOMIC_LOCK2i __kmp_atomic_lock_2i // short
#define ATOMIC_LOCK4i __kmp_atomic_lock_4i // long int
#define ATOMIC_LOCK4r __kmp_atomic_lock_4r // float
#define ATOMIC_LOCK8i __kmp_atomic_lock_8i // long long int
#define ATOMIC_LOCK8r __kmp_atomic_lock_8r // double
#define ATOMIC_LOCK8c __kmp_atomic_lock_8c // float complex
#define ATOMIC_LOCK10r __kmp_atomic_lock_10r // long double
#define ATOMIC_LOCK16r __kmp_atomic_lock_16r // _Quad
#define ATOMIC_LOCK16c __kmp_atomic_lock_16c // double complex
#define ATOMIC_LOCK20c __kmp_atomic_lock_20c // long double complex
#define ATOMIC_LOCK32c __kmp_atomic_lock_32c // _Quad complex
// Operation on *lhs, rhs bound by critical section
//     OP     - operator (it's supposed to contain an assignment)
//     LCK_ID - lock identifier
#define OP_CRITICAL(OP, LCK_ID)                                                \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
                                                                               \
  (*lhs) OP(rhs);                                                              \
                                                                               \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);

#define OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID)                                   \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  (*lhs) = (TYPE)((*lhs)OP((TYPE)rhs));                                        \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
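// Sketch of the lock-based fallback (illustrative only): for a long double
// operand, OP_UPDATE_CRITICAL(long double, +, 10r) expands to
//
//   __kmp_acquire_atomic_lock(&__kmp_atomic_lock_10r, gtid);
//   (*lhs) = (long double)((*lhs) + ((long double)rhs));
//   __kmp_release_atomic_lock(&__kmp_atomic_lock_10r, gtid);
//
// so every thread updating a 10-byte real serializes on the same 10r lock.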
#ifdef KMP_GOMP_COMPAT
#define OP_GOMP_CRITICAL(OP, FLAG)                                             \
  if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
    KMP_CHECK_GTID;                                                            \
    OP_CRITICAL(OP, 0);                                                        \
    return;                                                                    \
  }
#define OP_UPDATE_GOMP_CRITICAL(TYPE, OP, FLAG)                                \
  if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
    KMP_CHECK_GTID;                                                            \
    OP_UPDATE_CRITICAL(TYPE, OP, 0);                                           \
    return;                                                                    \
  }
#else
#define OP_GOMP_CRITICAL(OP, FLAG)
#define OP_UPDATE_GOMP_CRITICAL(TYPE, OP, FLAG)
#endif /* KMP_GOMP_COMPAT */
#if KMP_MIC
#define KMP_DO_PAUSE _mm_delay_32(1) // brief delay inside CAS retry loops
#else
#define KMP_DO_PAUSE KMP_CPU_PAUSE()
#endif /* KMP_MIC */
// Operation on *lhs, rhs using a "compare_and_store" retry loop
#define OP_CMPXCHG(TYPE, BITS, OP)                                             \
  {                                                                            \
    TYPE old_value, new_value;                                                 \
    old_value = *(TYPE volatile *)lhs;                                         \
    new_value = (TYPE)(old_value OP((TYPE)rhs));                               \
    while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
        (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value,     \
        *VOLATILE_CAST(kmp_int##BITS *) & new_value)) {                        \
      KMP_DO_PAUSE;                                                            \
      old_value = *(TYPE volatile *)lhs;                                       \
      new_value = (TYPE)(old_value OP((TYPE)rhs));                             \
    }                                                                          \
  }
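// Note (illustrative): for a 4-byte float, OP_CMPXCHG(kmp_real32, 32, +)
// reinterprets the float bits as kmp_int32 for the compare-and-store:
//
//   while (!KMP_COMPARE_AND_STORE_ACQ32(
//       (kmp_int32 *)lhs, *VOLATILE_CAST(kmp_int32 *) & old_value,
//       *VOLATILE_CAST(kmp_int32 *) & new_value)) { ... }
//
// and simply re-reads *lhs and recomputes new_value whenever another thread
// changed the location between the read and the CAS.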
// Workaround for C78287 (complex(kind=4) data type): the affected compilers
// ignore the volatile qualifier in OP_CMPXCHG, so the bits are shuttled
// through an explicit pointer member instead.
#define OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP)                                  \
  {                                                                            \
    struct _sss {                                                              \
      TYPE cmp;                                                                \
      kmp_int##BITS *vvv;                                                      \
    };                                                                         \
    struct _sss old_value, new_value;                                          \
    old_value.vvv = (kmp_int##BITS *)&old_value.cmp;                           \
    new_value.vvv = (kmp_int##BITS *)&new_value.cmp;                           \
    *old_value.vvv = *(volatile kmp_int##BITS *)lhs;                           \
    new_value.cmp = (TYPE)(old_value.cmp OP rhs);                              \
    while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
        (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv,   \
        *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) {                      \
      KMP_DO_PAUSE;                                                            \
      *old_value.vvv = *(volatile kmp_int##BITS *)lhs;                         \
      new_value.cmp = (TYPE)(old_value.cmp OP rhs);                            \
    }                                                                          \
  }
#if KMP_OS_WINDOWS && KMP_ARCH_AARCH64
// On Windows/AArch64 the cast-free variants below are used instead (same
// shape as OP_CMPXCHG_WORKAROUND, without the explicit type casts).
#undef OP_CMPXCHG
#define OP_CMPXCHG(TYPE, BITS, OP)                                             \
  {                                                                            \
    struct _sss {                                                              \
      TYPE cmp;                                                                \
      kmp_int##BITS *vvv;                                                      \
    };                                                                         \
    struct _sss old_value, new_value;                                          \
    old_value.vvv = (kmp_int##BITS *)&old_value.cmp;                           \
    new_value.vvv = (kmp_int##BITS *)&new_value.cmp;                           \
    *old_value.vvv = *(volatile kmp_int##BITS *)lhs;                           \
    new_value.cmp = old_value.cmp OP rhs;                                      \
    while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
        (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv,   \
        *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) {                      \
      KMP_DO_PAUSE;                                                            \
      *old_value.vvv = *(volatile kmp_int##BITS *)lhs;                         \
      new_value.cmp = old_value.cmp OP rhs;                                    \
    }                                                                          \
  }

#undef OP_UPDATE_CRITICAL
#define OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID)                                   \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  (*lhs) = (*lhs)OP rhs;                                                       \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
#endif // KMP_OS_WINDOWS && KMP_ARCH_AARCH64

#if KMP_ARCH_X86 || KMP_ARCH_X86_64
872#define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
874 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
875 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
877 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
880#define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
882 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
883 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
884 OP_CMPXCHG(TYPE, BITS, OP) \
889#define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, \
891 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
892 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
893 OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
901#define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
903 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
904 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
905 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
907 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
910 OP_UPDATE_CRITICAL(TYPE, OP, \
915#define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
917 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
918 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
919 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
920 OP_CMPXCHG(TYPE, BITS, OP) \
923 OP_UPDATE_CRITICAL(TYPE, OP, \
930#define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, \
932 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
933 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
934 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
935 OP_CMPXCHG(TYPE, BITS, OP) \
938 OP_UPDATE_CRITICAL(TYPE, OP, \
947ATOMIC_FIXED_ADD(fixed4, add, kmp_int32, 32, +, 4i, 3,
949ATOMIC_FIXED_ADD(fixed4, sub, kmp_int32, 32, -, 4i, 3,
952ATOMIC_CMPXCHG(float4, add, kmp_real32, 32, +, 4r, 3,
954ATOMIC_CMPXCHG(float4, sub, kmp_real32, 32, -, 4r, 3,
958ATOMIC_FIXED_ADD(fixed8, add, kmp_int64, 64, +, 8i, 7,
960ATOMIC_FIXED_ADD(fixed8, sub, kmp_int64, 64, -, 8i, 7,
963ATOMIC_CMPXCHG(float8, add, kmp_real64, 64, +, 8r, 7,
965ATOMIC_CMPXCHG(float8, sub, kmp_real64, 64, -, 8r, 7,
983ATOMIC_CMPXCHG(fixed1, add, kmp_int8, 8, +, 1i, 0,
985ATOMIC_CMPXCHG(fixed1, andb, kmp_int8, 8, &, 1i, 0,
987ATOMIC_CMPXCHG(fixed1, div, kmp_int8, 8, /, 1i, 0,
989ATOMIC_CMPXCHG(fixed1u, div, kmp_uint8, 8, /, 1i, 0,
991ATOMIC_CMPXCHG(fixed1, mul, kmp_int8, 8, *, 1i, 0,
993ATOMIC_CMPXCHG(fixed1, orb, kmp_int8, 8, |, 1i, 0,
995ATOMIC_CMPXCHG(fixed1, shl, kmp_int8, 8, <<, 1i, 0,
997ATOMIC_CMPXCHG(fixed1, shr, kmp_int8, 8, >>, 1i, 0,
999ATOMIC_CMPXCHG(fixed1u, shr, kmp_uint8, 8, >>, 1i, 0,
1001ATOMIC_CMPXCHG(fixed1, sub, kmp_int8, 8, -, 1i, 0,
1003ATOMIC_CMPXCHG(fixed1, xor, kmp_int8, 8, ^, 1i, 0,
1005ATOMIC_CMPXCHG(fixed2, add, kmp_int16, 16, +, 2i, 1,
1007ATOMIC_CMPXCHG(fixed2, andb, kmp_int16, 16, &, 2i, 1,
1009ATOMIC_CMPXCHG(fixed2, div, kmp_int16, 16, /, 2i, 1,
1011ATOMIC_CMPXCHG(fixed2u, div, kmp_uint16, 16, /, 2i, 1,
1013ATOMIC_CMPXCHG(fixed2, mul, kmp_int16, 16, *, 2i, 1,
1015ATOMIC_CMPXCHG(fixed2, orb, kmp_int16, 16, |, 2i, 1,
1017ATOMIC_CMPXCHG(fixed2, shl, kmp_int16, 16, <<, 2i, 1,
1019ATOMIC_CMPXCHG(fixed2, shr, kmp_int16, 16, >>, 2i, 1,
1021ATOMIC_CMPXCHG(fixed2u, shr, kmp_uint16, 16, >>, 2i, 1,
1023ATOMIC_CMPXCHG(fixed2, sub, kmp_int16, 16, -, 2i, 1,
1025ATOMIC_CMPXCHG(fixed2, xor, kmp_int16, 16, ^, 2i, 1,
1027ATOMIC_CMPXCHG(fixed4, andb, kmp_int32, 32, &, 4i, 3,
1029ATOMIC_CMPXCHG(fixed4, div, kmp_int32, 32, /, 4i, 3,
1031ATOMIC_CMPXCHG(fixed4u, div, kmp_uint32, 32, /, 4i, 3,
1033ATOMIC_CMPXCHG(fixed4, mul, kmp_int32, 32, *, 4i, 3,
1035ATOMIC_CMPXCHG(fixed4, orb, kmp_int32, 32, |, 4i, 3,
1037ATOMIC_CMPXCHG(fixed4, shl, kmp_int32, 32, <<, 4i, 3,
1039ATOMIC_CMPXCHG(fixed4, shr, kmp_int32, 32, >>, 4i, 3,
1041ATOMIC_CMPXCHG(fixed4u, shr, kmp_uint32, 32, >>, 4i, 3,
1043ATOMIC_CMPXCHG(fixed4, xor, kmp_int32, 32, ^, 4i, 3,
1045ATOMIC_CMPXCHG(fixed8, andb, kmp_int64, 64, &, 8i, 7,
1047ATOMIC_CMPXCHG(fixed8, div, kmp_int64, 64, /, 8i, 7,
1049ATOMIC_CMPXCHG(fixed8u, div, kmp_uint64, 64, /, 8i, 7,
1051ATOMIC_CMPXCHG(fixed8, mul, kmp_int64, 64, *, 8i, 7,
1053ATOMIC_CMPXCHG(fixed8, orb, kmp_int64, 64, |, 8i, 7,
1055ATOMIC_CMPXCHG(fixed8, shl, kmp_int64, 64, <<, 8i, 7,
1057ATOMIC_CMPXCHG(fixed8, shr, kmp_int64, 64, >>, 8i, 7,
1059ATOMIC_CMPXCHG(fixed8u, shr, kmp_uint64, 64, >>, 8i, 7,
1061ATOMIC_CMPXCHG(fixed8, xor, kmp_int64, 64, ^, 8i, 7,
1063ATOMIC_CMPXCHG(float4, div, kmp_real32, 32, /, 4r, 3,
1065ATOMIC_CMPXCHG(float4, mul, kmp_real32, 32, *, 4r, 3,
1067ATOMIC_CMPXCHG(float8, div, kmp_real64, 64, /, 8r, 7,
1069ATOMIC_CMPXCHG(float8, mul, kmp_real64, 64, *, 8r, 7,
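// Usage sketch (an assumption about the caller, not code from this file): an
// OpenMP compiler that targets these entry points lowers, e.g.,
//
//   #pragma omp atomic
//   x += y;                       // double x, y
//
// into roughly
//
//   __kmpc_atomic_float8_add(&loc, gtid, &x, y);
//
// where loc is an ident_t describing the source location and gtid is the
// caller's global thread id (or KMP_GTID_UNKNOWN in GOMP-compat mode, in
// which case KMP_CHECK_GTID resolves it).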
1079#define ATOMIC_CRIT_L(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1080 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1081 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1082 OP_CRITICAL(= *lhs OP, LCK_ID) \
1085#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1089#define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG) \
1090 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1091 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1092 OP_CMPXCHG(TYPE, BITS, OP) \
1098#define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG) \
1099 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1100 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1101 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1102 OP_CMPXCHG(TYPE, BITS, OP) \
1105 OP_CRITICAL(= *lhs OP, LCK_ID) \
1110ATOMIC_CMPX_L(fixed1, andl,
char, 8, &&, 1i, 0,
1112ATOMIC_CMPX_L(fixed1, orl,
char, 8, ||, 1i, 0,
1114ATOMIC_CMPX_L(fixed2, andl,
short, 16, &&, 2i, 1,
1116ATOMIC_CMPX_L(fixed2, orl,
short, 16, ||, 2i, 1,
1118ATOMIC_CMPX_L(fixed4, andl, kmp_int32, 32, &&, 4i, 3,
1120ATOMIC_CMPX_L(fixed4, orl, kmp_int32, 32, ||, 4i, 3,
1122ATOMIC_CMPX_L(fixed8, andl, kmp_int64, 64, &&, 8i, 7,
1124ATOMIC_CMPX_L(fixed8, orl, kmp_int64, 64, ||, 8i, 7,
#define MIN_MAX_CRITSECT(OP, LCK_ID)                                           \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  if (*lhs OP rhs) { /* still need actions? */                                 \
    *lhs = rhs;                                                                \
  }                                                                            \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);

#ifdef KMP_GOMP_COMPAT
#define GOMP_MIN_MAX_CRITSECT(OP, FLAG)                                        \
  if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
    KMP_CHECK_GTID;                                                            \
    MIN_MAX_CRITSECT(OP, 0);                                                   \
    return;                                                                    \
  }
#else
#define GOMP_MIN_MAX_CRITSECT(OP, FLAG)
#endif /* KMP_GOMP_COMPAT */

#define MIN_MAX_CMPXCHG(TYPE, BITS, OP)                                        \
  {                                                                            \
    TYPE KMP_ATOMIC_VOLATILE temp_val;                                         \
    TYPE old_value;                                                            \
    temp_val = *lhs;                                                           \
    old_value = temp_val;                                                      \
    while (old_value OP rhs && /* still need actions? */                       \
           !KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
               (kmp_int##BITS *)lhs,                                           \
               *VOLATILE_CAST(kmp_int##BITS *) & old_value,                    \
               *VOLATILE_CAST(kmp_int##BITS *) & rhs)) {                       \
      temp_val = *lhs;                                                         \
      old_value = temp_val;                                                    \
    }                                                                          \
  }
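// Note (illustrative): for MAX the reduction test is "<", so
// MIN_MAX_CMPXCHG(kmp_int32, 32, <) keeps retrying the compare-and-store
// only while (*lhs < rhs) still holds; rhs itself is installed as the new
// value, and the loop ends early once some thread has already stored a value
// that is not smaller than rhs.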
1175#define MIN_MAX_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1176 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1177 if (*lhs OP rhs) { \
1178 GOMP_MIN_MAX_CRITSECT(OP, GOMP_FLAG) \
1179 MIN_MAX_CRITSECT(OP, LCK_ID) \
1183#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1187#define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1189 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1190 if (*lhs OP rhs) { \
1191 GOMP_MIN_MAX_CRITSECT(OP, GOMP_FLAG) \
1192 MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1199#define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1201 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1202 if (*lhs OP rhs) { \
1203 GOMP_MIN_MAX_CRITSECT(OP, GOMP_FLAG) \
1204 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1205 MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1208 MIN_MAX_CRITSECT(OP, LCK_ID) \
1214MIN_MAX_COMPXCHG(fixed1, max,
char, 8, <, 1i, 0,
1216MIN_MAX_COMPXCHG(fixed1, min,
char, 8, >, 1i, 0,
1218MIN_MAX_COMPXCHG(fixed2, max,
short, 16, <, 2i, 1,
1220MIN_MAX_COMPXCHG(fixed2, min,
short, 16, >, 2i, 1,
1222MIN_MAX_COMPXCHG(fixed4, max, kmp_int32, 32, <, 4i, 3,
1224MIN_MAX_COMPXCHG(fixed4, min, kmp_int32, 32, >, 4i, 3,
1226MIN_MAX_COMPXCHG(fixed8, max, kmp_int64, 64, <, 8i, 7,
1228MIN_MAX_COMPXCHG(fixed8, min, kmp_int64, 64, >, 8i, 7,
1230MIN_MAX_COMPXCHG(float4, max, kmp_real32, 32, <, 4r, 3,
1232MIN_MAX_COMPXCHG(float4, min, kmp_real32, 32, >, 4r, 3,
1234MIN_MAX_COMPXCHG(float8, max, kmp_real64, 64, <, 8r, 7,
1236MIN_MAX_COMPXCHG(float8, min, kmp_real64, 64, >, 8r, 7,
1239MIN_MAX_CRITICAL(float16, max, QUAD_LEGACY, <, 16r,
1241MIN_MAX_CRITICAL(float16, min, QUAD_LEGACY, >, 16r,
1244MIN_MAX_CRITICAL(float16, max_a16, Quad_a16_t, <, 16r,
1246MIN_MAX_CRITICAL(float16, min_a16, Quad_a16_t, >, 16r,
1253#define ATOMIC_CRIT_EQV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1254 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1255 OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) \
1256 OP_CRITICAL(^= (TYPE) ~, LCK_ID) \
1260#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1263#define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1265 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1266 OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) \
1267 OP_CMPXCHG(TYPE, BITS, OP) \
1273#define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1275 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1276 OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) \
1277 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1278 OP_CMPXCHG(TYPE, BITS, OP) \
1281 OP_CRITICAL(^= (TYPE) ~, LCK_ID) \
1286ATOMIC_CMPXCHG(fixed1, neqv, kmp_int8, 8, ^, 1i, 0,
1288ATOMIC_CMPXCHG(fixed2, neqv, kmp_int16, 16, ^, 2i, 1,
1290ATOMIC_CMPXCHG(fixed4, neqv, kmp_int32, 32, ^, 4i, 3,
1292ATOMIC_CMPXCHG(fixed8, neqv, kmp_int64, 64, ^, 8i, 7,
1294ATOMIC_CMPX_EQV(fixed1, eqv, kmp_int8, 8, ^~, 1i, 0,
1296ATOMIC_CMPX_EQV(fixed2, eqv, kmp_int16, 16, ^~, 2i, 1,
1298ATOMIC_CMPX_EQV(fixed4, eqv, kmp_int32, 32, ^~, 4i, 3,
1300ATOMIC_CMPX_EQV(fixed8, eqv, kmp_int64, 64, ^~, 8i, 7,
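// Note (illustrative): .NEQV. maps to plain xor, while .EQV. uses the token
// pair "^~", so OP_CMPXCHG(kmp_int8, 8, ^~) computes
//   new_value = (kmp_int8)(old_value ^ ~((kmp_int8)rhs));
// i.e. the bitwise complement of the xor (XNOR) of the two operands.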
1309#define ATOMIC_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1310 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1311 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1312 OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID) \
1317ATOMIC_CRITICAL(float10, add,
long double, +, 10r,
1319ATOMIC_CRITICAL(float10, sub,
long double, -, 10r,
1321ATOMIC_CRITICAL(float10, mul,
long double, *, 10r,
1323ATOMIC_CRITICAL(float10, div,
long double, /, 10r,
1327ATOMIC_CRITICAL(float16, add, QUAD_LEGACY, +, 16r,
1329ATOMIC_CRITICAL(float16, sub, QUAD_LEGACY, -, 16r,
1331ATOMIC_CRITICAL(float16, mul, QUAD_LEGACY, *, 16r,
1333ATOMIC_CRITICAL(float16, div, QUAD_LEGACY, /, 16r,
1336ATOMIC_CRITICAL(float16, add_a16, Quad_a16_t, +, 16r,
1338ATOMIC_CRITICAL(float16, sub_a16, Quad_a16_t, -, 16r,
1340ATOMIC_CRITICAL(float16, mul_a16, Quad_a16_t, *, 16r,
1342ATOMIC_CRITICAL(float16, div_a16, Quad_a16_t, /, 16r,
1350ATOMIC_CMPXCHG_WORKAROUND(cmplx4, add, kmp_cmplx32, 64, +, 8c, 7,
1352ATOMIC_CMPXCHG_WORKAROUND(cmplx4, sub, kmp_cmplx32, 64, -, 8c, 7,
1354ATOMIC_CMPXCHG_WORKAROUND(cmplx4, mul, kmp_cmplx32, 64, *, 8c, 7,
1356ATOMIC_CMPXCHG_WORKAROUND(cmplx4, div, kmp_cmplx32, 64, /, 8c, 7,
1360ATOMIC_CRITICAL(cmplx4, add, kmp_cmplx32, +, 8c, 1)
1361ATOMIC_CRITICAL(cmplx4, sub, kmp_cmplx32, -, 8c, 1)
1362ATOMIC_CRITICAL(cmplx4, mul, kmp_cmplx32, *, 8c, 1)
1363ATOMIC_CRITICAL(cmplx4, div, kmp_cmplx32, /, 8c, 1)
1366ATOMIC_CRITICAL(cmplx8, add, kmp_cmplx64, +, 16c, 1)
1367ATOMIC_CRITICAL(cmplx8, sub, kmp_cmplx64, -, 16c, 1)
1368ATOMIC_CRITICAL(cmplx8, mul, kmp_cmplx64, *, 16c, 1)
1369ATOMIC_CRITICAL(cmplx8, div, kmp_cmplx64, /, 16c, 1)
1370ATOMIC_CRITICAL(cmplx10, add, kmp_cmplx80, +, 20c,
1372ATOMIC_CRITICAL(cmplx10, sub, kmp_cmplx80, -, 20c,
1374ATOMIC_CRITICAL(cmplx10, mul, kmp_cmplx80, *, 20c,
1376ATOMIC_CRITICAL(cmplx10, div, kmp_cmplx80, /, 20c,
1379ATOMIC_CRITICAL(cmplx16, add, CPLX128_LEG, +, 32c,
1381ATOMIC_CRITICAL(cmplx16, sub, CPLX128_LEG, -, 32c,
1383ATOMIC_CRITICAL(cmplx16, mul, CPLX128_LEG, *, 32c,
1385ATOMIC_CRITICAL(cmplx16, div, CPLX128_LEG, /, 32c,
1388ATOMIC_CRITICAL(cmplx16, add_a16, kmp_cmplx128_a16_t, +, 32c,
1390ATOMIC_CRITICAL(cmplx16, sub_a16, kmp_cmplx128_a16_t, -, 32c,
1392ATOMIC_CRITICAL(cmplx16, mul_a16, kmp_cmplx128_a16_t, *, 32c,
1394ATOMIC_CRITICAL(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
1401#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1409#define OP_CRITICAL_REV(TYPE, OP, LCK_ID) \
1410 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
1412 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
1414 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
1416#ifdef KMP_GOMP_COMPAT
1417#define OP_GOMP_CRITICAL_REV(TYPE, OP, FLAG) \
1418 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
1420 OP_CRITICAL_REV(TYPE, OP, 0); \
1425#define OP_GOMP_CRITICAL_REV(TYPE, OP, FLAG)
1433#define ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, RET_TYPE) \
1434 RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID##_rev(ident_t *id_ref, int gtid, \
1435 TYPE *lhs, TYPE rhs) { \
1436 KMP_DEBUG_ASSERT(__kmp_init_serial); \
1437 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_rev: T#%d\n", gtid));
// Operation on *lhs, rhs using "compare_and_store", reversed operand order
#define OP_CMPXCHG_REV(TYPE, BITS, OP)                                         \
  {                                                                            \
    TYPE KMP_ATOMIC_VOLATILE temp_val;                                         \
    TYPE old_value, new_value;                                                 \
    temp_val = *lhs;                                                           \
    old_value = temp_val;                                                      \
    new_value = (TYPE)(rhs OP old_value);                                      \
    while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
        (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value,     \
        *VOLATILE_CAST(kmp_int##BITS *) & new_value)) {                        \
      KMP_DO_PAUSE;                                                            \
      temp_val = *lhs;                                                         \
      old_value = temp_val;                                                    \
      new_value = (TYPE)(rhs OP old_value);                                    \
    }                                                                          \
  }
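// Note (illustrative): the only difference from OP_CMPXCHG is the operand
// order: OP_CMPXCHG_REV(kmp_real64, 64, /) computes
//   new_value = (kmp_real64)(rhs / old_value);
// which is what reversed updates of the form "x = expr / x" require for the
// non-commutative operators (sub, div, shifts).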
1465#define ATOMIC_CMPXCHG_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, GOMP_FLAG) \
1466 ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, void) \
1467 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1468 OP_CMPXCHG_REV(TYPE, BITS, OP) \
1485ATOMIC_CMPXCHG_REV(fixed1, div, kmp_int8, 8, /, 1i,
1487ATOMIC_CMPXCHG_REV(fixed1u, div, kmp_uint8, 8, /, 1i,
1489ATOMIC_CMPXCHG_REV(fixed1, shl, kmp_int8, 8, <<, 1i,
1491ATOMIC_CMPXCHG_REV(fixed1, shr, kmp_int8, 8, >>, 1i,
1493ATOMIC_CMPXCHG_REV(fixed1u, shr, kmp_uint8, 8, >>, 1i,
1495ATOMIC_CMPXCHG_REV(fixed1, sub, kmp_int8, 8, -, 1i,
1498ATOMIC_CMPXCHG_REV(fixed2, div, kmp_int16, 16, /, 2i,
1500ATOMIC_CMPXCHG_REV(fixed2u, div, kmp_uint16, 16, /, 2i,
1502ATOMIC_CMPXCHG_REV(fixed2, shl, kmp_int16, 16, <<, 2i,
1504ATOMIC_CMPXCHG_REV(fixed2, shr, kmp_int16, 16, >>, 2i,
1506ATOMIC_CMPXCHG_REV(fixed2u, shr, kmp_uint16, 16, >>, 2i,
1508ATOMIC_CMPXCHG_REV(fixed2, sub, kmp_int16, 16, -, 2i,
1511ATOMIC_CMPXCHG_REV(fixed4, div, kmp_int32, 32, /, 4i,
1513ATOMIC_CMPXCHG_REV(fixed4u, div, kmp_uint32, 32, /, 4i,
1515ATOMIC_CMPXCHG_REV(fixed4, shl, kmp_int32, 32, <<, 4i,
1517ATOMIC_CMPXCHG_REV(fixed4, shr, kmp_int32, 32, >>, 4i,
1519ATOMIC_CMPXCHG_REV(fixed4u, shr, kmp_uint32, 32, >>, 4i,
1521ATOMIC_CMPXCHG_REV(fixed4, sub, kmp_int32, 32, -, 4i,
1524ATOMIC_CMPXCHG_REV(fixed8, div, kmp_int64, 64, /, 8i,
1526ATOMIC_CMPXCHG_REV(fixed8u, div, kmp_uint64, 64, /, 8i,
1528ATOMIC_CMPXCHG_REV(fixed8, shl, kmp_int64, 64, <<, 8i,
1530ATOMIC_CMPXCHG_REV(fixed8, shr, kmp_int64, 64, >>, 8i,
1532ATOMIC_CMPXCHG_REV(fixed8u, shr, kmp_uint64, 64, >>, 8i,
1534ATOMIC_CMPXCHG_REV(fixed8, sub, kmp_int64, 64, -, 8i,
1537ATOMIC_CMPXCHG_REV(float4, div, kmp_real32, 32, /, 4r,
1539ATOMIC_CMPXCHG_REV(float4, sub, kmp_real32, 32, -, 4r,
1542ATOMIC_CMPXCHG_REV(float8, div, kmp_real64, 64, /, 8r,
1544ATOMIC_CMPXCHG_REV(float8, sub, kmp_real64, 64, -, 8r,
1554#define ATOMIC_CRITICAL_REV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1555 ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, void) \
1556 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1557 OP_CRITICAL_REV(TYPE, OP, LCK_ID) \
1562ATOMIC_CRITICAL_REV(float10, sub,
long double, -, 10r,
1564ATOMIC_CRITICAL_REV(float10, div,
long double, /, 10r,
1568ATOMIC_CRITICAL_REV(float16, sub, QUAD_LEGACY, -, 16r,
1570ATOMIC_CRITICAL_REV(float16, div, QUAD_LEGACY, /, 16r,
1573ATOMIC_CRITICAL_REV(float16, sub_a16, Quad_a16_t, -, 16r,
1575ATOMIC_CRITICAL_REV(float16, div_a16, Quad_a16_t, /, 16r,
1581ATOMIC_CRITICAL_REV(cmplx4, sub, kmp_cmplx32, -, 8c,
1583ATOMIC_CRITICAL_REV(cmplx4, div, kmp_cmplx32, /, 8c,
1585ATOMIC_CRITICAL_REV(cmplx8, sub, kmp_cmplx64, -, 16c,
1587ATOMIC_CRITICAL_REV(cmplx8, div, kmp_cmplx64, /, 16c,
1589ATOMIC_CRITICAL_REV(cmplx10, sub, kmp_cmplx80, -, 20c,
1591ATOMIC_CRITICAL_REV(cmplx10, div, kmp_cmplx80, /, 20c,
1594ATOMIC_CRITICAL_REV(cmplx16, sub, CPLX128_LEG, -, 32c,
1596ATOMIC_CRITICAL_REV(cmplx16, div, CPLX128_LEG, /, 32c,
1599ATOMIC_CRITICAL_REV(cmplx16, sub_a16, kmp_cmplx128_a16_t, -, 32c,
1601ATOMIC_CRITICAL_REV(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
// Beginning of a mixed-type routine: lhs keeps TYPE, rhs arrives as RTYPE.
#define ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                \
  void __kmpc_atomic_##TYPE_ID##_##OP_ID##_##RTYPE_ID(                         \
      ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs) {                       \
    KMP_DEBUG_ASSERT(__kmp_init_serial);                                       \
    KA_TRACE(100,                                                              \
             ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_" #RTYPE_ID ": T#%d\n",   \
              gtid));
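// Illustrative expansion: ATOMIC_BEGIN_MIX(fixed4, kmp_int32, mul, float8,
// kmp_real64) opens
//
//   void __kmpc_atomic_fixed4_mul_float8(ident_t *id_ref, int gtid,
//                                        kmp_int32 *lhs, kmp_real64 rhs) {
//
// i.e. the left operand keeps its own type while the right-hand side arrives
// as the wider type (kmp_real64 or _Quad); the OP_* macros above then convert
// rhs to TYPE before applying the operator.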
1629#define ATOMIC_CRITICAL_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, LCK_ID, \
1631 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1632 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1633 OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID) \
1637#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1640#define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1641 LCK_ID, MASK, GOMP_FLAG) \
1642 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1643 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1644 OP_CMPXCHG(TYPE, BITS, OP) \
1650#define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1651 LCK_ID, MASK, GOMP_FLAG) \
1652 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1653 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1654 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1655 OP_CMPXCHG(TYPE, BITS, OP) \
1658 OP_UPDATE_CRITICAL(TYPE, OP, \
1665#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1667#define ATOMIC_CMPXCHG_REV_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
1668 RTYPE, LCK_ID, MASK, GOMP_FLAG) \
1669 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1670 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1671 OP_CMPXCHG_REV(TYPE, BITS, OP) \
1673#define ATOMIC_CRITICAL_REV_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, \
1674 LCK_ID, GOMP_FLAG) \
1675 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1676 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1677 OP_CRITICAL_REV(TYPE, OP, LCK_ID) \
1682ATOMIC_CMPXCHG_MIX(fixed1,
char, mul, 8, *, float8, kmp_real64, 1i, 0,
1684ATOMIC_CMPXCHG_MIX(fixed1,
char, div, 8, /, float8, kmp_real64, 1i, 0,
1686ATOMIC_CMPXCHG_MIX(fixed2,
short, mul, 16, *, float8, kmp_real64, 2i, 1,
1688ATOMIC_CMPXCHG_MIX(fixed2,
short, div, 16, /, float8, kmp_real64, 2i, 1,
1690ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, mul, 32, *, float8, kmp_real64, 4i, 3,
1692ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, div, 32, /, float8, kmp_real64, 4i, 3,
1694ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, mul, 64, *, float8, kmp_real64, 8i, 7,
1696ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, div, 64, /, float8, kmp_real64, 8i, 7,
1698ATOMIC_CMPXCHG_MIX(float4, kmp_real32, add, 32, +, float8, kmp_real64, 4r, 3,
1700ATOMIC_CMPXCHG_MIX(float4, kmp_real32, sub, 32, -, float8, kmp_real64, 4r, 3,
1702ATOMIC_CMPXCHG_MIX(float4, kmp_real32, mul, 32, *, float8, kmp_real64, 4r, 3,
1704ATOMIC_CMPXCHG_MIX(float4, kmp_real32, div, 32, /, float8, kmp_real64, 4r, 3,
1710ATOMIC_CMPXCHG_MIX(fixed1,
char, add, 8, +, fp, _Quad, 1i, 0,
1712ATOMIC_CMPXCHG_MIX(fixed1u, uchar, add, 8, +, fp, _Quad, 1i, 0,
1714ATOMIC_CMPXCHG_MIX(fixed1,
char, sub, 8, -, fp, _Quad, 1i, 0,
1716ATOMIC_CMPXCHG_MIX(fixed1u, uchar, sub, 8, -, fp, _Quad, 1i, 0,
1718ATOMIC_CMPXCHG_MIX(fixed1,
char, mul, 8, *, fp, _Quad, 1i, 0,
1720ATOMIC_CMPXCHG_MIX(fixed1u, uchar, mul, 8, *, fp, _Quad, 1i, 0,
1722ATOMIC_CMPXCHG_MIX(fixed1,
char, div, 8, /, fp, _Quad, 1i, 0,
1724ATOMIC_CMPXCHG_MIX(fixed1u, uchar, div, 8, /, fp, _Quad, 1i, 0,
1727ATOMIC_CMPXCHG_MIX(fixed2,
short, add, 16, +, fp, _Quad, 2i, 1,
1729ATOMIC_CMPXCHG_MIX(fixed2u, ushort, add, 16, +, fp, _Quad, 2i, 1,
1731ATOMIC_CMPXCHG_MIX(fixed2,
short, sub, 16, -, fp, _Quad, 2i, 1,
1733ATOMIC_CMPXCHG_MIX(fixed2u, ushort, sub, 16, -, fp, _Quad, 2i, 1,
1735ATOMIC_CMPXCHG_MIX(fixed2,
short, mul, 16, *, fp, _Quad, 2i, 1,
1737ATOMIC_CMPXCHG_MIX(fixed2u, ushort, mul, 16, *, fp, _Quad, 2i, 1,
1739ATOMIC_CMPXCHG_MIX(fixed2,
short, div, 16, /, fp, _Quad, 2i, 1,
1741ATOMIC_CMPXCHG_MIX(fixed2u, ushort, div, 16, /, fp, _Quad, 2i, 1,
1744ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, add, 32, +, fp, _Quad, 4i, 3,
1746ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, add, 32, +, fp, _Quad, 4i, 3,
1748ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, sub, 32, -, fp, _Quad, 4i, 3,
1750ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, sub, 32, -, fp, _Quad, 4i, 3,
1752ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, mul, 32, *, fp, _Quad, 4i, 3,
1754ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, mul, 32, *, fp, _Quad, 4i, 3,
1756ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, div, 32, /, fp, _Quad, 4i, 3,
1758ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, div, 32, /, fp, _Quad, 4i, 3,
1761ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, add, 64, +, fp, _Quad, 8i, 7,
1763ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, add, 64, +, fp, _Quad, 8i, 7,
1765ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, sub, 64, -, fp, _Quad, 8i, 7,
1767ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, sub, 64, -, fp, _Quad, 8i, 7,
1769ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, mul, 64, *, fp, _Quad, 8i, 7,
1771ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, mul, 64, *, fp, _Quad, 8i, 7,
1773ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, div, 64, /, fp, _Quad, 8i, 7,
1775ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, div, 64, /, fp, _Quad, 8i, 7,
1778ATOMIC_CMPXCHG_MIX(float4, kmp_real32, add, 32, +, fp, _Quad, 4r, 3,
1780ATOMIC_CMPXCHG_MIX(float4, kmp_real32, sub, 32, -, fp, _Quad, 4r, 3,
1782ATOMIC_CMPXCHG_MIX(float4, kmp_real32, mul, 32, *, fp, _Quad, 4r, 3,
1784ATOMIC_CMPXCHG_MIX(float4, kmp_real32, div, 32, /, fp, _Quad, 4r, 3,
1787ATOMIC_CMPXCHG_MIX(float8, kmp_real64, add, 64, +, fp, _Quad, 8r, 7,
1789ATOMIC_CMPXCHG_MIX(float8, kmp_real64, sub, 64, -, fp, _Quad, 8r, 7,
1791ATOMIC_CMPXCHG_MIX(float8, kmp_real64, mul, 64, *, fp, _Quad, 8r, 7,
1793ATOMIC_CMPXCHG_MIX(float8, kmp_real64, div, 64, /, fp, _Quad, 8r, 7,
1796ATOMIC_CRITICAL_FP(float10,
long double, add, +, fp, _Quad, 10r,
1798ATOMIC_CRITICAL_FP(float10,
long double, sub, -, fp, _Quad, 10r,
1800ATOMIC_CRITICAL_FP(float10,
long double, mul, *, fp, _Quad, 10r,
1802ATOMIC_CRITICAL_FP(float10,
long double, div, /, fp, _Quad, 10r,
1805#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1807ATOMIC_CMPXCHG_REV_MIX(fixed1,
char, sub_rev, 8, -, fp, _Quad, 1i, 0,
1809ATOMIC_CMPXCHG_REV_MIX(fixed1u, uchar, sub_rev, 8, -, fp, _Quad, 1i, 0,
1811ATOMIC_CMPXCHG_REV_MIX(fixed1,
char, div_rev, 8, /, fp, _Quad, 1i, 0,
1813ATOMIC_CMPXCHG_REV_MIX(fixed1u, uchar, div_rev, 8, /, fp, _Quad, 1i, 0,
1816ATOMIC_CMPXCHG_REV_MIX(fixed2,
short, sub_rev, 16, -, fp, _Quad, 2i, 1,
1818ATOMIC_CMPXCHG_REV_MIX(fixed2u, ushort, sub_rev, 16, -, fp, _Quad, 2i, 1,
1820ATOMIC_CMPXCHG_REV_MIX(fixed2,
short, div_rev, 16, /, fp, _Quad, 2i, 1,
1822ATOMIC_CMPXCHG_REV_MIX(fixed2u, ushort, div_rev, 16, /, fp, _Quad, 2i, 1,
1825ATOMIC_CMPXCHG_REV_MIX(fixed4, kmp_int32, sub_rev, 32, -, fp, _Quad, 4i, 3,
1827ATOMIC_CMPXCHG_REV_MIX(fixed4u, kmp_uint32, sub_rev, 32, -, fp, _Quad, 4i, 3,
1829ATOMIC_CMPXCHG_REV_MIX(fixed4, kmp_int32, div_rev, 32, /, fp, _Quad, 4i, 3,
1831ATOMIC_CMPXCHG_REV_MIX(fixed4u, kmp_uint32, div_rev, 32, /, fp, _Quad, 4i, 3,
1834ATOMIC_CMPXCHG_REV_MIX(fixed8, kmp_int64, sub_rev, 64, -, fp, _Quad, 8i, 7,
1836ATOMIC_CMPXCHG_REV_MIX(fixed8u, kmp_uint64, sub_rev, 64, -, fp, _Quad, 8i, 7,
1838ATOMIC_CMPXCHG_REV_MIX(fixed8, kmp_int64, div_rev, 64, /, fp, _Quad, 8i, 7,
1840ATOMIC_CMPXCHG_REV_MIX(fixed8u, kmp_uint64, div_rev, 64, /, fp, _Quad, 8i, 7,
1843ATOMIC_CMPXCHG_REV_MIX(float4, kmp_real32, sub_rev, 32, -, fp, _Quad, 4r, 3,
1845ATOMIC_CMPXCHG_REV_MIX(float4, kmp_real32, div_rev, 32, /, fp, _Quad, 4r, 3,
1848ATOMIC_CMPXCHG_REV_MIX(float8, kmp_real64, sub_rev, 64, -, fp, _Quad, 8r, 7,
1850ATOMIC_CMPXCHG_REV_MIX(float8, kmp_real64, div_rev, 64, /, fp, _Quad, 8r, 7,
1853ATOMIC_CRITICAL_REV_FP(float10,
long double, sub_rev, -, fp, _Quad, 10r,
1855ATOMIC_CRITICAL_REV_FP(float10,
long double, div_rev, /, fp, _Quad, 10r,
1861#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1866#define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1867 LCK_ID, MASK, GOMP_FLAG) \
1868 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1869 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1870 OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
1874#define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1875 LCK_ID, MASK, GOMP_FLAG) \
1876 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1877 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1878 OP_CMPXCHG(TYPE, BITS, OP) \
1884#define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1885 LCK_ID, MASK, GOMP_FLAG) \
1886 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1887 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1888 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1889 OP_CMPXCHG(TYPE, BITS, OP) \
1892 OP_UPDATE_CRITICAL(TYPE, OP, \
1898ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, add, 64, +, cmplx8, kmp_cmplx64, 8c,
1900ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, sub, 64, -, cmplx8, kmp_cmplx64, 8c,
1902ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, mul, 64, *, cmplx8, kmp_cmplx64, 8c,
1904ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, div, 64, /, cmplx8, kmp_cmplx64, 8c,
1908#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1919#define ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, RET_TYPE) \
1920 RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, \
1922 KMP_DEBUG_ASSERT(__kmp_init_serial); \
1923 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
// Atomic read done via a compare_and_store of the location with itself
#define OP_CMPXCHG_READ(TYPE, BITS, OP)                                        \
  {                                                                            \
    TYPE KMP_ATOMIC_VOLATILE temp_val;                                         \
    union f_i_union {                                                          \
      TYPE f_val;                                                              \
      kmp_int##BITS i_val;                                                     \
    };                                                                         \
    union f_i_union old_value;                                                 \
    temp_val = *loc;                                                           \
    old_value.f_val = temp_val;                                                \
    old_value.i_val = KMP_COMPARE_AND_STORE_RET##BITS(                         \
        (kmp_int##BITS *)loc,                                                  \
        *VOLATILE_CAST(kmp_int##BITS *) & old_value.i_val,                     \
        *VOLATILE_CAST(kmp_int##BITS *) & old_value.i_val);                    \
    new_value = old_value.f_val;                                               \
  }
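// Note (illustrative): the compare-and-store above either swaps the location
// with its own current bits or fails without storing; in both cases
// KMP_COMPARE_AND_STORE_RET32/64 hands back the current contents, which gives
// an atomic 4/8-byte snapshot of a float without taking a lock.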
1958#define OP_CRITICAL_READ(OP, LCK_ID) \
1959 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
1961 new_value = (*loc); \
1963 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
1966#ifdef KMP_GOMP_COMPAT
1967#define OP_GOMP_CRITICAL_READ(OP, FLAG) \
1968 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
1970 OP_CRITICAL_READ(OP, 0); \
1974#define OP_GOMP_CRITICAL_READ(OP, FLAG)
1978#define ATOMIC_FIXED_READ(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
1979 ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, TYPE) \
1981 OP_GOMP_CRITICAL_READ(OP## =, GOMP_FLAG) \
1982 new_value = KMP_TEST_THEN_ADD##BITS(loc, OP 0); \
1986#define ATOMIC_CMPXCHG_READ(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
1987 ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, TYPE) \
1989 OP_GOMP_CRITICAL_READ(OP## =, GOMP_FLAG) \
1990 OP_CMPXCHG_READ(TYPE, BITS, OP) \
1998#define ATOMIC_CRITICAL_READ(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1999 ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, TYPE) \
2001 OP_GOMP_CRITICAL_READ(OP## =, GOMP_FLAG) \
2002 OP_CRITICAL_READ(OP, LCK_ID) \
2012#define OP_CRITICAL_READ_WRK(OP, LCK_ID) \
2013 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2017 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
2019#ifdef KMP_GOMP_COMPAT
2020#define OP_GOMP_CRITICAL_READ_WRK(OP, FLAG) \
2021 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2023 OP_CRITICAL_READ_WRK(OP, 0); \
2026#define OP_GOMP_CRITICAL_READ_WRK(OP, FLAG)
2029#define ATOMIC_BEGIN_READ_WRK(TYPE_ID, OP_ID, TYPE) \
2030 void __kmpc_atomic_##TYPE_ID##_##OP_ID(TYPE *out, ident_t *id_ref, int gtid, \
2032 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2033 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
2036#define ATOMIC_CRITICAL_READ_WRK(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2037 ATOMIC_BEGIN_READ_WRK(TYPE_ID, OP_ID, TYPE) \
2038 OP_GOMP_CRITICAL_READ_WRK(OP## =, GOMP_FLAG) \
2039 OP_CRITICAL_READ_WRK(OP, LCK_ID) \
2046ATOMIC_FIXED_READ(fixed4, rd, kmp_int32, 32, +, 0)
2047ATOMIC_FIXED_READ(fixed8, rd, kmp_int64, 64, +,
2049ATOMIC_CMPXCHG_READ(float4, rd, kmp_real32, 32, +,
2051ATOMIC_CMPXCHG_READ(float8, rd, kmp_real64, 64, +,
2055ATOMIC_CMPXCHG_READ(fixed1, rd, kmp_int8, 8, +,
2057ATOMIC_CMPXCHG_READ(fixed2, rd, kmp_int16, 16, +,
2060ATOMIC_CRITICAL_READ(float10, rd,
long double, +, 10r,
2063ATOMIC_CRITICAL_READ(float16, rd, QUAD_LEGACY, +, 16r,
2069ATOMIC_CRITICAL_READ_WRK(cmplx4, rd, kmp_cmplx32, +, 8c,
2072ATOMIC_CRITICAL_READ(cmplx4, rd, kmp_cmplx32, +, 8c,
2075ATOMIC_CRITICAL_READ(cmplx8, rd, kmp_cmplx64, +, 16c,
2077ATOMIC_CRITICAL_READ(cmplx10, rd, kmp_cmplx80, +, 20c,
2080ATOMIC_CRITICAL_READ(cmplx16, rd, CPLX128_LEG, +, 32c,
2083ATOMIC_CRITICAL_READ(float16, a16_rd, Quad_a16_t, +, 16r,
2085ATOMIC_CRITICAL_READ(cmplx16, a16_rd, kmp_cmplx128_a16_t, +, 32c,
2093#define ATOMIC_XCHG_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2094 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2095 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2096 KMP_XCHG_FIXED##BITS(lhs, rhs); \
2099#define ATOMIC_XCHG_FLOAT_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2100 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2101 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2102 KMP_XCHG_REAL##BITS(lhs, rhs); \
2112#define OP_CMPXCHG_WR(TYPE, BITS, OP) \
2114 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2115 TYPE old_value, new_value; \
2117 old_value = temp_val; \
2119 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2120 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2121 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2123 old_value = temp_val; \
2129#define ATOMIC_CMPXCHG_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2130 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2131 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2132 OP_CMPXCHG_WR(TYPE, BITS, OP) \
2141#define ATOMIC_CRITICAL_WR(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2142 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2143 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2144 OP_CRITICAL(OP, LCK_ID) \
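// Illustrative expansion: __kmpc_atomic_fixed4_wr produced by ATOMIC_XCHG_WR
// reduces to a single exchange,
//
//   void __kmpc_atomic_fixed4_wr(ident_t *id_ref, int gtid,
//                                kmp_int32 *lhs, kmp_int32 rhs) {
//     ...
//     KMP_XCHG_FIXED32(lhs, rhs); // previous value is discarded
//   }
//
// while types with no native exchange fall back to the CAS loop
// (ATOMIC_CMPXCHG_WR) or to a critical section (ATOMIC_CRITICAL_WR).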
2148ATOMIC_XCHG_WR(fixed1, wr, kmp_int8, 8, =,
2150ATOMIC_XCHG_WR(fixed2, wr, kmp_int16, 16, =,
2152ATOMIC_XCHG_WR(fixed4, wr, kmp_int32, 32, =,
2155ATOMIC_CMPXCHG_WR(fixed8, wr, kmp_int64, 64, =,
2158ATOMIC_XCHG_WR(fixed8, wr, kmp_int64, 64, =,
2162ATOMIC_XCHG_FLOAT_WR(float4, wr, kmp_real32, 32, =,
2165ATOMIC_CMPXCHG_WR(float8, wr, kmp_real64, 64, =,
2168ATOMIC_XCHG_FLOAT_WR(float8, wr, kmp_real64, 64, =,
2172ATOMIC_CRITICAL_WR(float10, wr,
long double, =, 10r,
2175ATOMIC_CRITICAL_WR(float16, wr, QUAD_LEGACY, =, 16r,
2178ATOMIC_CRITICAL_WR(cmplx4, wr, kmp_cmplx32, =, 8c, 1)
2179ATOMIC_CRITICAL_WR(cmplx8, wr, kmp_cmplx64, =, 16c,
2181ATOMIC_CRITICAL_WR(cmplx10, wr, kmp_cmplx80, =, 20c,
2184ATOMIC_CRITICAL_WR(cmplx16, wr, CPLX128_LEG, =, 32c,
2187ATOMIC_CRITICAL_WR(float16, a16_wr, Quad_a16_t, =, 16r,
2189ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
2202#define ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, RET_TYPE) \
2203 RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, \
2204 TYPE *lhs, TYPE rhs, int flag) { \
2205 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2206 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
#define OP_CRITICAL_CPT(OP, LCK_ID)                                            \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  if (flag) {                                                                  \
    (*lhs) OP rhs;                                                             \
    new_value = (*lhs);                                                        \
  } else {                                                                     \
    new_value = (*lhs);                                                        \
    (*lhs) OP rhs;                                                             \
  }                                                                            \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  return new_value;

#define OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID)                               \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  if (flag) {                                                                  \
    (*lhs) = (TYPE)((*lhs)OP rhs);                                             \
    new_value = (*lhs);                                                        \
  } else {                                                                     \
    new_value = (*lhs);                                                        \
    (*lhs) = (TYPE)((*lhs)OP rhs);                                             \
  }                                                                            \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
  return new_value;
2243#ifdef KMP_GOMP_COMPAT
2244#define OP_GOMP_CRITICAL_CPT(TYPE, OP, FLAG) \
2245 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2247 OP_UPDATE_CRITICAL_CPT(TYPE, OP, 0); \
2250#define OP_GOMP_CRITICAL_CPT(TYPE, OP, FLAG)
2260#define OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2262 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2263 TYPE old_value, new_value; \
2265 old_value = temp_val; \
2266 new_value = (TYPE)(old_value OP rhs); \
2267 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2268 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2269 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2271 old_value = temp_val; \
2272 new_value = (TYPE)(old_value OP rhs); \
#define ATOMIC_CMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG)          \
  ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
  TYPE new_value;                                                              \
  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG)                                    \
  OP_CMPXCHG_CPT(TYPE, BITS, OP)                                               \
  }

#define ATOMIC_FIXED_ADD_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG)        \
  ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
  TYPE old_value, new_value;                                                   \
  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG)                                    \
  /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */            \
  old_value = KMP_TEST_THEN_ADD##BITS(lhs, OP rhs);                            \
  if (flag) {                                                                  \
    return old_value OP rhs;                                                   \
  } else                                                                       \
    return old_value;                                                          \
  }
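// Note (illustrative): the extra "flag" argument selects capture semantics:
// flag != 0 captures the post-update value ("x += e; v = x;"), flag == 0 the
// pre-update value ("v = x; x += e;"). ATOMIC_FIXED_ADD_CPT gets both from
// the one fetch-and-add: KMP_TEST_THEN_ADD returns the old value, and the new
// value is recomputed as old_value OP rhs when flag is set.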
2304ATOMIC_FIXED_ADD_CPT(fixed4, add_cpt, kmp_int32, 32, +,
2306ATOMIC_FIXED_ADD_CPT(fixed4, sub_cpt, kmp_int32, 32, -,
2308ATOMIC_FIXED_ADD_CPT(fixed8, add_cpt, kmp_int64, 64, +,
2310ATOMIC_FIXED_ADD_CPT(fixed8, sub_cpt, kmp_int64, 64, -,
2313ATOMIC_CMPXCHG_CPT(float4, add_cpt, kmp_real32, 32, +,
2315ATOMIC_CMPXCHG_CPT(float4, sub_cpt, kmp_real32, 32, -,
2317ATOMIC_CMPXCHG_CPT(float8, add_cpt, kmp_real64, 64, +,
2319ATOMIC_CMPXCHG_CPT(float8, sub_cpt, kmp_real64, 64, -,
2334ATOMIC_CMPXCHG_CPT(fixed1, add_cpt, kmp_int8, 8, +,
2336ATOMIC_CMPXCHG_CPT(fixed1, andb_cpt, kmp_int8, 8, &,
2338ATOMIC_CMPXCHG_CPT(fixed1, div_cpt, kmp_int8, 8, /,
2340ATOMIC_CMPXCHG_CPT(fixed1u, div_cpt, kmp_uint8, 8, /,
2342ATOMIC_CMPXCHG_CPT(fixed1, mul_cpt, kmp_int8, 8, *,
2344ATOMIC_CMPXCHG_CPT(fixed1, orb_cpt, kmp_int8, 8, |,
2346ATOMIC_CMPXCHG_CPT(fixed1, shl_cpt, kmp_int8, 8, <<,
2348ATOMIC_CMPXCHG_CPT(fixed1, shr_cpt, kmp_int8, 8, >>,
2350ATOMIC_CMPXCHG_CPT(fixed1u, shr_cpt, kmp_uint8, 8, >>,
2352ATOMIC_CMPXCHG_CPT(fixed1, sub_cpt, kmp_int8, 8, -,
2354ATOMIC_CMPXCHG_CPT(fixed1, xor_cpt, kmp_int8, 8, ^,
2356ATOMIC_CMPXCHG_CPT(fixed2, add_cpt, kmp_int16, 16, +,
2358ATOMIC_CMPXCHG_CPT(fixed2, andb_cpt, kmp_int16, 16, &,
2360ATOMIC_CMPXCHG_CPT(fixed2, div_cpt, kmp_int16, 16, /,
2362ATOMIC_CMPXCHG_CPT(fixed2u, div_cpt, kmp_uint16, 16, /,
2364ATOMIC_CMPXCHG_CPT(fixed2, mul_cpt, kmp_int16, 16, *,
2366ATOMIC_CMPXCHG_CPT(fixed2, orb_cpt, kmp_int16, 16, |,
2368ATOMIC_CMPXCHG_CPT(fixed2, shl_cpt, kmp_int16, 16, <<,
2370ATOMIC_CMPXCHG_CPT(fixed2, shr_cpt, kmp_int16, 16, >>,
2372ATOMIC_CMPXCHG_CPT(fixed2u, shr_cpt, kmp_uint16, 16, >>,
2374ATOMIC_CMPXCHG_CPT(fixed2, sub_cpt, kmp_int16, 16, -,
2376ATOMIC_CMPXCHG_CPT(fixed2, xor_cpt, kmp_int16, 16, ^,
2378ATOMIC_CMPXCHG_CPT(fixed4, andb_cpt, kmp_int32, 32, &,
2380ATOMIC_CMPXCHG_CPT(fixed4, div_cpt, kmp_int32, 32, /,
2382ATOMIC_CMPXCHG_CPT(fixed4u, div_cpt, kmp_uint32, 32, /,
2384ATOMIC_CMPXCHG_CPT(fixed4, mul_cpt, kmp_int32, 32, *,
2386ATOMIC_CMPXCHG_CPT(fixed4, orb_cpt, kmp_int32, 32, |,
2388ATOMIC_CMPXCHG_CPT(fixed4, shl_cpt, kmp_int32, 32, <<,
2390ATOMIC_CMPXCHG_CPT(fixed4, shr_cpt, kmp_int32, 32, >>,
2392ATOMIC_CMPXCHG_CPT(fixed4u, shr_cpt, kmp_uint32, 32, >>,
2394ATOMIC_CMPXCHG_CPT(fixed4, xor_cpt, kmp_int32, 32, ^,
2396ATOMIC_CMPXCHG_CPT(fixed8, andb_cpt, kmp_int64, 64, &,
2398ATOMIC_CMPXCHG_CPT(fixed8, div_cpt, kmp_int64, 64, /,
2400ATOMIC_CMPXCHG_CPT(fixed8u, div_cpt, kmp_uint64, 64, /,
2402ATOMIC_CMPXCHG_CPT(fixed8, mul_cpt, kmp_int64, 64, *,
2404ATOMIC_CMPXCHG_CPT(fixed8, orb_cpt, kmp_int64, 64, |,
2406ATOMIC_CMPXCHG_CPT(fixed8, shl_cpt, kmp_int64, 64, <<,
2408ATOMIC_CMPXCHG_CPT(fixed8, shr_cpt, kmp_int64, 64, >>,
2410ATOMIC_CMPXCHG_CPT(fixed8u, shr_cpt, kmp_uint64, 64, >>,
2412ATOMIC_CMPXCHG_CPT(fixed8, xor_cpt, kmp_int64, 64, ^,
2414ATOMIC_CMPXCHG_CPT(float4, div_cpt, kmp_real32, 32, /,
2416ATOMIC_CMPXCHG_CPT(float4, mul_cpt, kmp_real32, 32, *,
2418ATOMIC_CMPXCHG_CPT(float8, div_cpt, kmp_real64, 64, /,
2420ATOMIC_CMPXCHG_CPT(float8, mul_cpt, kmp_real64, 64, *,
2432#define ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
2433 TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID##_##RTYPE_ID( \
2434 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs, int flag) { \
2435 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2437 ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_" #RTYPE_ID ": T#%d\n", \
2441#define ATOMIC_CMPXCHG_CPT_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
2442 RTYPE, LCK_ID, MASK, GOMP_FLAG) \
2443 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
2445 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2446 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2450#define ATOMIC_CRITICAL_CPT_MIX(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, \
2451 LCK_ID, GOMP_FLAG) \
2452 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
2454 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2455 OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) \
2458ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, add_cpt, 8, +, fp, _Quad, 1i, 0,
2460ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, add_cpt, 8, +, fp, _Quad, 1i, 0,
2462ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, sub_cpt, 8, -, fp, _Quad, 1i, 0,
2464ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, sub_cpt, 8, -, fp, _Quad, 1i, 0,
2466ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, mul_cpt, 8, *, fp, _Quad, 1i, 0,
2468ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, mul_cpt, 8, *, fp, _Quad, 1i, 0,
2470ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, div_cpt, 8, /, fp, _Quad, 1i, 0,
2472ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, div_cpt, 8, /, fp, _Quad, 1i, 0,
2475ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, add_cpt, 16, +, fp, _Quad, 2i, 1,
2477ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, add_cpt, 16, +, fp, _Quad, 2i, 1,
2479ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, sub_cpt, 16, -, fp, _Quad, 2i, 1,
2481ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, sub_cpt, 16, -, fp, _Quad, 2i, 1,
2483ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, mul_cpt, 16, *, fp, _Quad, 2i, 1,
2485ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, mul_cpt, 16, *, fp, _Quad, 2i, 1,
2487ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, div_cpt, 16, /, fp, _Quad, 2i, 1,
2489ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, div_cpt, 16, /, fp, _Quad, 2i, 1,
2492ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, add_cpt, 32, +, fp, _Quad, 4i, 3,
2494ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, add_cpt, 32, +, fp, _Quad, 4i, 3,
2496ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, sub_cpt, 32, -, fp, _Quad, 4i, 3,
2498ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, sub_cpt, 32, -, fp, _Quad, 4i, 3,
2500ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, mul_cpt, 32, *, fp, _Quad, 4i, 3,
2502ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, mul_cpt, 32, *, fp, _Quad, 4i, 3,
2504ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, div_cpt, 32, /, fp, _Quad, 4i, 3,
2506ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, div_cpt, 32, /, fp, _Quad, 4i, 3,
2509ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, add_cpt, 64, +, fp, _Quad, 8i, 7,
2511ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, add_cpt, 64, +, fp, _Quad, 8i, 7,
2513ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, sub_cpt, 64, -, fp, _Quad, 8i, 7,
2515ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, sub_cpt, 64, -, fp, _Quad, 8i, 7,
2517ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, mul_cpt, 64, *, fp, _Quad, 8i, 7,
2519ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, mul_cpt, 64, *, fp, _Quad, 8i, 7,
2521ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, div_cpt, 64, /, fp, _Quad, 8i, 7,
2523ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, div_cpt, 64, /, fp, _Quad, 8i, 7,
2526ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, add_cpt, 32, +, fp, _Quad, 4r, 3,
2528ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, sub_cpt, 32, -, fp, _Quad, 4r, 3,
2530ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, mul_cpt, 32, *, fp, _Quad, 4r, 3,
2532ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, div_cpt, 32, /, fp, _Quad, 4r, 3,
2535ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, add_cpt, 64, +, fp, _Quad, 8r, 7,
2537ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, sub_cpt, 64, -, fp, _Quad, 8r, 7,
2539ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, mul_cpt, 64, *, fp, _Quad, 8r, 7,
2541ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, div_cpt, 64, /, fp, _Quad, 8r, 7,
2544ATOMIC_CRITICAL_CPT_MIX(float10,
long double, add_cpt, +, fp, _Quad, 10r,
2546ATOMIC_CRITICAL_CPT_MIX(float10,
long double, sub_cpt, -, fp, _Quad, 10r,
2548ATOMIC_CRITICAL_CPT_MIX(float10,
long double, mul_cpt, *, fp, _Quad, 10r,
2550ATOMIC_CRITICAL_CPT_MIX(float10,
long double, div_cpt, /, fp, _Quad, 10r,
2564#define OP_CRITICAL_L_CPT(OP, LCK_ID) \
2565 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2569 (*lhs) = new_value; \
2571 new_value = (*lhs); \
2575 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
2578#ifdef KMP_GOMP_COMPAT
2579#define OP_GOMP_CRITICAL_L_CPT(OP, FLAG) \
2580 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2582 OP_CRITICAL_L_CPT(OP, 0); \
2586#define OP_GOMP_CRITICAL_L_CPT(OP, FLAG)
2591#define ATOMIC_CMPX_L_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2592 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2595 OP_GOMP_CRITICAL_L_CPT(= *lhs OP, GOMP_FLAG) \
2596 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2599ATOMIC_CMPX_L_CPT(fixed1, andl_cpt,
char, 8, &&,
2601ATOMIC_CMPX_L_CPT(fixed1, orl_cpt,
char, 8, ||,
2603ATOMIC_CMPX_L_CPT(fixed2, andl_cpt,
short, 16, &&,
2605ATOMIC_CMPX_L_CPT(fixed2, orl_cpt,
short, 16, ||,
2607ATOMIC_CMPX_L_CPT(fixed4, andl_cpt, kmp_int32, 32, &&,
2609ATOMIC_CMPX_L_CPT(fixed4, orl_cpt, kmp_int32, 32, ||,
2611ATOMIC_CMPX_L_CPT(fixed8, andl_cpt, kmp_int64, 64, &&,
2613ATOMIC_CMPX_L_CPT(fixed8, orl_cpt, kmp_int64, 64, ||,
2625#define MIN_MAX_CRITSECT_CPT(OP, LCK_ID) \
2626 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2628 if (*lhs OP rhs) { \
2634 new_value = old_value; \
2638 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2642#ifdef KMP_GOMP_COMPAT
2643#define GOMP_MIN_MAX_CRITSECT_CPT(OP, FLAG) \
2644 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2646 MIN_MAX_CRITSECT_CPT(OP, 0); \
2649#define GOMP_MIN_MAX_CRITSECT_CPT(OP, FLAG)
2653#define MIN_MAX_CMPXCHG_CPT(TYPE, BITS, OP) \
2655 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2658 old_value = temp_val; \
2659 while (old_value OP rhs && \
2660 !KMP_COMPARE_AND_STORE_ACQ##BITS( \
2661 (kmp_int##BITS *)lhs, \
2662 *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2663 *VOLATILE_CAST(kmp_int##BITS *) & rhs)) { \
2665 old_value = temp_val; \
2675#define MIN_MAX_CRITICAL_CPT(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2676 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2677 TYPE new_value, old_value; \
2678 if (*lhs OP rhs) { \
2679 GOMP_MIN_MAX_CRITSECT_CPT(OP, GOMP_FLAG) \
2680 MIN_MAX_CRITSECT_CPT(OP, LCK_ID) \
2685#define MIN_MAX_COMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2686 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2687 TYPE new_value, old_value; \
2689 if (*lhs OP rhs) { \
2690 GOMP_MIN_MAX_CRITSECT_CPT(OP, GOMP_FLAG) \
2691 MIN_MAX_CMPXCHG_CPT(TYPE, BITS, OP) \
2696MIN_MAX_COMPXCHG_CPT(fixed1, max_cpt,
char, 8, <,
2698MIN_MAX_COMPXCHG_CPT(fixed1, min_cpt,
char, 8, >,
2700MIN_MAX_COMPXCHG_CPT(fixed2, max_cpt,
short, 16, <,
2702MIN_MAX_COMPXCHG_CPT(fixed2, min_cpt,
short, 16, >,
2704MIN_MAX_COMPXCHG_CPT(fixed4, max_cpt, kmp_int32, 32, <,
2706MIN_MAX_COMPXCHG_CPT(fixed4, min_cpt, kmp_int32, 32, >,
2708MIN_MAX_COMPXCHG_CPT(fixed8, max_cpt, kmp_int64, 64, <,
2710MIN_MAX_COMPXCHG_CPT(fixed8, min_cpt, kmp_int64, 64, >,
2712MIN_MAX_COMPXCHG_CPT(float4, max_cpt, kmp_real32, 32, <,
2714MIN_MAX_COMPXCHG_CPT(float4, min_cpt, kmp_real32, 32, >,
2716MIN_MAX_COMPXCHG_CPT(float8, max_cpt, kmp_real64, 64, <,
2718MIN_MAX_COMPXCHG_CPT(float8, min_cpt, kmp_real64, 64, >,
2721MIN_MAX_CRITICAL_CPT(float16, max_cpt, QUAD_LEGACY, <, 16r,
2723MIN_MAX_CRITICAL_CPT(float16, min_cpt, QUAD_LEGACY, >, 16r,
2726MIN_MAX_CRITICAL_CPT(float16, max_a16_cpt, Quad_a16_t, <, 16r,
2728MIN_MAX_CRITICAL_CPT(float16, min_a16_cpt, Quad_a16_t, >, 16r,
2734#ifdef KMP_GOMP_COMPAT
2735#define OP_GOMP_CRITICAL_EQV_CPT(OP, FLAG) \
2736 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2738 OP_CRITICAL_CPT(OP, 0); \
2741#define OP_GOMP_CRITICAL_EQV_CPT(OP, FLAG)
2744#define ATOMIC_CMPX_EQV_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2745 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2748 OP_GOMP_CRITICAL_EQV_CPT(^= (TYPE) ~, GOMP_FLAG) \
2749 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2754ATOMIC_CMPXCHG_CPT(fixed1, neqv_cpt, kmp_int8, 8, ^,
2756ATOMIC_CMPXCHG_CPT(fixed2, neqv_cpt, kmp_int16, 16, ^,
2758ATOMIC_CMPXCHG_CPT(fixed4, neqv_cpt, kmp_int32, 32, ^,
2760ATOMIC_CMPXCHG_CPT(fixed8, neqv_cpt, kmp_int64, 64, ^,
2762ATOMIC_CMPX_EQV_CPT(fixed1, eqv_cpt, kmp_int8, 8, ^~,
2764ATOMIC_CMPX_EQV_CPT(fixed2, eqv_cpt, kmp_int16, 16, ^~,
2766ATOMIC_CMPX_EQV_CPT(fixed4, eqv_cpt, kmp_int32, 32, ^~,
2768ATOMIC_CMPX_EQV_CPT(fixed8, eqv_cpt, kmp_int64, 64, ^~,
2777#define ATOMIC_CRITICAL_CPT(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2778 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2780 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2781 OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) \
2787#define OP_CRITICAL_CPT_WRK(OP, LCK_ID) \
2788 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2798 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2802#ifdef KMP_GOMP_COMPAT
2803#define OP_GOMP_CRITICAL_CPT_WRK(OP, FLAG) \
2804 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2806 OP_CRITICAL_CPT_WRK(OP## =, 0); \
2809#define OP_GOMP_CRITICAL_CPT_WRK(OP, FLAG)
2813#define ATOMIC_BEGIN_WRK(TYPE_ID, OP_ID, TYPE) \
2814 void __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, TYPE *lhs, \
2815 TYPE rhs, TYPE *out, int flag) { \
2816 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2817 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
2820#define ATOMIC_CRITICAL_CPT_WRK(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2821 ATOMIC_BEGIN_WRK(TYPE_ID, OP_ID, TYPE) \
2822 OP_GOMP_CRITICAL_CPT_WRK(OP, GOMP_FLAG) \
2823 OP_CRITICAL_CPT_WRK(OP## =, LCK_ID) \
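// Note (illustrative): the _WRK variants return the result through an extra
// out parameter instead of a return value; ATOMIC_BEGIN_WRK gives the routine
// the shape
//
//   void __kmpc_atomic_cmplx4_add_cpt(ident_t *id_ref, int gtid,
//                                     kmp_cmplx32 *lhs, kmp_cmplx32 rhs,
//                                     kmp_cmplx32 *out, int flag);
//
// so the captured value is stored into *out.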
2829ATOMIC_CRITICAL_CPT(float10, add_cpt,
long double, +, 10r,
2831ATOMIC_CRITICAL_CPT(float10, sub_cpt,
long double, -, 10r,
2833ATOMIC_CRITICAL_CPT(float10, mul_cpt,
long double, *, 10r,
2835ATOMIC_CRITICAL_CPT(float10, div_cpt,
long double, /, 10r,
2839ATOMIC_CRITICAL_CPT(float16, add_cpt, QUAD_LEGACY, +, 16r,
2841ATOMIC_CRITICAL_CPT(float16, sub_cpt, QUAD_LEGACY, -, 16r,
2843ATOMIC_CRITICAL_CPT(float16, mul_cpt, QUAD_LEGACY, *, 16r,
2845ATOMIC_CRITICAL_CPT(float16, div_cpt, QUAD_LEGACY, /, 16r,
2848ATOMIC_CRITICAL_CPT(float16, add_a16_cpt, Quad_a16_t, +, 16r,
2850ATOMIC_CRITICAL_CPT(float16, sub_a16_cpt, Quad_a16_t, -, 16r,
2852ATOMIC_CRITICAL_CPT(float16, mul_a16_cpt, Quad_a16_t, *, 16r,
2854ATOMIC_CRITICAL_CPT(float16, div_a16_cpt, Quad_a16_t, /, 16r,
2862ATOMIC_CRITICAL_CPT_WRK(cmplx4, add_cpt, kmp_cmplx32, +, 8c,
2864ATOMIC_CRITICAL_CPT_WRK(cmplx4, sub_cpt, kmp_cmplx32, -, 8c,
2866ATOMIC_CRITICAL_CPT_WRK(cmplx4, mul_cpt, kmp_cmplx32, *, 8c,
2868ATOMIC_CRITICAL_CPT_WRK(cmplx4, div_cpt, kmp_cmplx32, /, 8c,
2871ATOMIC_CRITICAL_CPT(cmplx8, add_cpt, kmp_cmplx64, +, 16c,
2873ATOMIC_CRITICAL_CPT(cmplx8, sub_cpt, kmp_cmplx64, -, 16c,
2875ATOMIC_CRITICAL_CPT(cmplx8, mul_cpt, kmp_cmplx64, *, 16c,
2877ATOMIC_CRITICAL_CPT(cmplx8, div_cpt, kmp_cmplx64, /, 16c,
2879ATOMIC_CRITICAL_CPT(cmplx10, add_cpt, kmp_cmplx80, +, 20c,
2881ATOMIC_CRITICAL_CPT(cmplx10, sub_cpt, kmp_cmplx80, -, 20c,
2883ATOMIC_CRITICAL_CPT(cmplx10, mul_cpt, kmp_cmplx80, *, 20c,
2885ATOMIC_CRITICAL_CPT(cmplx10, div_cpt, kmp_cmplx80, /, 20c,
2888ATOMIC_CRITICAL_CPT(cmplx16, add_cpt, CPLX128_LEG, +, 32c,
2890ATOMIC_CRITICAL_CPT(cmplx16, sub_cpt, CPLX128_LEG, -, 32c,
2892ATOMIC_CRITICAL_CPT(cmplx16, mul_cpt, CPLX128_LEG, *, 32c,
2894ATOMIC_CRITICAL_CPT(cmplx16, div_cpt, CPLX128_LEG, /, 32c,
2897ATOMIC_CRITICAL_CPT(cmplx16, add_a16_cpt, kmp_cmplx128_a16_t, +, 32c,
2899ATOMIC_CRITICAL_CPT(cmplx16, sub_a16_cpt, kmp_cmplx128_a16_t, -, 32c,
2901ATOMIC_CRITICAL_CPT(cmplx16, mul_a16_cpt, kmp_cmplx128_a16_t, *, 32c,
2903ATOMIC_CRITICAL_CPT(cmplx16, div_a16_cpt, kmp_cmplx128_a16_t, /, 32c,
2918#define OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) \
2919 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2923 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
2924 new_value = (*lhs); \
2926 new_value = (*lhs); \
2927 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
2929 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2933#ifdef KMP_GOMP_COMPAT
2934#define OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, FLAG) \
2935 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2937 OP_CRITICAL_CPT_REV(TYPE, OP, 0); \
2940#define OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, FLAG)
2950#define OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
2952 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2953 TYPE old_value, new_value; \
2955 old_value = temp_val; \
2956 new_value = (TYPE)(rhs OP old_value); \
2957 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2958 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2959 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2961 old_value = temp_val; \
2962 new_value = (TYPE)(rhs OP old_value); \
2971#define ATOMIC_CMPXCHG_CPT_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2972 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2975 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
2976 OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
2979ATOMIC_CMPXCHG_CPT_REV(fixed1, div_cpt_rev, kmp_int8, 8, /,
2981ATOMIC_CMPXCHG_CPT_REV(fixed1u, div_cpt_rev, kmp_uint8, 8, /,
2983ATOMIC_CMPXCHG_CPT_REV(fixed1, shl_cpt_rev, kmp_int8, 8, <<,
2985ATOMIC_CMPXCHG_CPT_REV(fixed1, shr_cpt_rev, kmp_int8, 8, >>,
2987ATOMIC_CMPXCHG_CPT_REV(fixed1u, shr_cpt_rev, kmp_uint8, 8, >>,
2989ATOMIC_CMPXCHG_CPT_REV(fixed1, sub_cpt_rev, kmp_int8, 8, -,
2991ATOMIC_CMPXCHG_CPT_REV(fixed2, div_cpt_rev, kmp_int16, 16, /,
2993ATOMIC_CMPXCHG_CPT_REV(fixed2u, div_cpt_rev, kmp_uint16, 16, /,
2995ATOMIC_CMPXCHG_CPT_REV(fixed2, shl_cpt_rev, kmp_int16, 16, <<,
2997ATOMIC_CMPXCHG_CPT_REV(fixed2, shr_cpt_rev, kmp_int16, 16, >>,
2999ATOMIC_CMPXCHG_CPT_REV(fixed2u, shr_cpt_rev, kmp_uint16, 16, >>,
3001ATOMIC_CMPXCHG_CPT_REV(fixed2, sub_cpt_rev, kmp_int16, 16, -,
3003ATOMIC_CMPXCHG_CPT_REV(fixed4, div_cpt_rev, kmp_int32, 32, /,
3005ATOMIC_CMPXCHG_CPT_REV(fixed4u, div_cpt_rev, kmp_uint32, 32, /,
3007ATOMIC_CMPXCHG_CPT_REV(fixed4, shl_cpt_rev, kmp_int32, 32, <<,
3009ATOMIC_CMPXCHG_CPT_REV(fixed4, shr_cpt_rev, kmp_int32, 32, >>,
3011ATOMIC_CMPXCHG_CPT_REV(fixed4u, shr_cpt_rev, kmp_uint32, 32, >>,
3013ATOMIC_CMPXCHG_CPT_REV(fixed4, sub_cpt_rev, kmp_int32, 32, -,
3015ATOMIC_CMPXCHG_CPT_REV(fixed8, div_cpt_rev, kmp_int64, 64, /,
3017ATOMIC_CMPXCHG_CPT_REV(fixed8u, div_cpt_rev, kmp_uint64, 64, /,
3019ATOMIC_CMPXCHG_CPT_REV(fixed8, shl_cpt_rev, kmp_int64, 64, <<,
3021ATOMIC_CMPXCHG_CPT_REV(fixed8, shr_cpt_rev, kmp_int64, 64, >>,
3023ATOMIC_CMPXCHG_CPT_REV(fixed8u, shr_cpt_rev, kmp_uint64, 64, >>,
3025ATOMIC_CMPXCHG_CPT_REV(fixed8, sub_cpt_rev, kmp_int64, 64, -,
3027ATOMIC_CMPXCHG_CPT_REV(float4, div_cpt_rev, kmp_real32, 32, /,
3029ATOMIC_CMPXCHG_CPT_REV(float4, sub_cpt_rev, kmp_real32, 32, -,
3031ATOMIC_CMPXCHG_CPT_REV(float8, div_cpt_rev, kmp_real64, 64, /,
3033ATOMIC_CMPXCHG_CPT_REV(float8, sub_cpt_rev, kmp_real64, 64, -,
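// Editorial note (not in the original source): each ATOMIC_CMPXCHG_CPT_REV
// instantiation above emits one entry point named
// __kmpc_atomic_<type>_<op>_cpt_rev. For example, a compiler lowering
//
//   #pragma omp atomic capture
//   { x = 10 - x; v = x; }          // x, v are kmp_int32
//
// could emit something like the following call (loc and gtid are
// illustrative; the final argument requests the post-update value):
//
//   v = __kmpc_atomic_fixed4_sub_cpt_rev(&loc, gtid, &x, 10, 1);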
3043#define ATOMIC_CRITICAL_CPT_REV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
3044 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
3047 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
3048 OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) \
3053ATOMIC_CRITICAL_CPT_REV(float10, sub_cpt_rev, long double, -, 10r,
3055ATOMIC_CRITICAL_CPT_REV(float10, div_cpt_rev, long double, /, 10r,
3059ATOMIC_CRITICAL_CPT_REV(float16, sub_cpt_rev, QUAD_LEGACY, -, 16r,
3061ATOMIC_CRITICAL_CPT_REV(float16, div_cpt_rev, QUAD_LEGACY, /, 16r,
3064ATOMIC_CRITICAL_CPT_REV(float16, sub_a16_cpt_rev, Quad_a16_t, -, 16r,
3066ATOMIC_CRITICAL_CPT_REV(float16, div_a16_cpt_rev, Quad_a16_t, /, 16r,
3076#define OP_CRITICAL_CPT_REV_WRK(OP, LCK_ID) \
3077 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3080 (*lhs) = (rhs)OP(*lhs); \
3084 (*lhs) = (rhs)OP(*lhs); \
3087 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3091#ifdef KMP_GOMP_COMPAT
3092#define OP_GOMP_CRITICAL_CPT_REV_WRK(OP, FLAG) \
3093 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
3095 OP_CRITICAL_CPT_REV_WRK(OP, 0); \
3098#define OP_GOMP_CRITICAL_CPT_REV_WRK(OP, FLAG)
3102#define ATOMIC_CRITICAL_CPT_REV_WRK(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, \
3104 ATOMIC_BEGIN_WRK(TYPE_ID, OP_ID, TYPE) \
3105 OP_GOMP_CRITICAL_CPT_REV_WRK(OP, GOMP_FLAG) \
3106 OP_CRITICAL_CPT_REV_WRK(OP, LCK_ID) \
3112ATOMIC_CRITICAL_CPT_REV_WRK(cmplx4, sub_cpt_rev, kmp_cmplx32, -, 8c,
3114ATOMIC_CRITICAL_CPT_REV_WRK(cmplx4, div_cpt_rev, kmp_cmplx32, /, 8c,
3117ATOMIC_CRITICAL_CPT_REV(cmplx8, sub_cpt_rev, kmp_cmplx64, -, 16c,
3119ATOMIC_CRITICAL_CPT_REV(cmplx8, div_cpt_rev, kmp_cmplx64, /, 16c,
3121ATOMIC_CRITICAL_CPT_REV(cmplx10, sub_cpt_rev, kmp_cmplx80, -, 20c,
3123ATOMIC_CRITICAL_CPT_REV(cmplx10, div_cpt_rev, kmp_cmplx80, /, 20c,
3126ATOMIC_CRITICAL_CPT_REV(cmplx16, sub_cpt_rev, CPLX128_LEG, -, 32c,
3128ATOMIC_CRITICAL_CPT_REV(cmplx16, div_cpt_rev, CPLX128_LEG, /, 32c,
3131ATOMIC_CRITICAL_CPT_REV(cmplx16, sub_a16_cpt_rev, kmp_cmplx128_a16_t, -, 32c,
3133ATOMIC_CRITICAL_CPT_REV(cmplx16, div_a16_cpt_rev, kmp_cmplx128_a16_t, /, 32c,
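// Editorial note (not in the original source): the ATOMIC_CRITICAL_CPT_REV
// forms above fall back to a per-type lock (10r, 16r, 16c, 20c, 32c) because
// these operands are wider than the compare-and-store widths used by the
// OP_CMPXCHG_CPT_REV path in this file (8/16/32/64 bits). The _a16 variants
// differ only in requiring 16-byte-aligned operand types.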
3147#define ATOMIC_CMPXCHG_CPT_REV_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
3148 RTYPE, LCK_ID, MASK, GOMP_FLAG) \
3149 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
3151 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
3152 OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
3156#define ATOMIC_CRITICAL_CPT_REV_MIX(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, \
3157 LCK_ID, GOMP_FLAG) \
3158 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
3160 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
3161 OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) \
3164ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1, char, sub_cpt_rev, 8, -, fp, _Quad, 1i, 0,
3166ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1u, uchar, sub_cpt_rev, 8, -, fp, _Quad, 1i, 0,
3168ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1, char, div_cpt_rev, 8, /, fp, _Quad, 1i, 0,
3170ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1u, uchar, div_cpt_rev, 8, /, fp, _Quad, 1i, 0,
3173ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2, short, sub_cpt_rev, 16, -, fp, _Quad, 2i, 1,
3175ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2u, ushort, sub_cpt_rev, 16, -, fp, _Quad, 2i,
3178ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2, short, div_cpt_rev, 16, /, fp, _Quad, 2i, 1,
3180ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2u, ushort, div_cpt_rev, 16, /, fp, _Quad, 2i,
3184ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4, kmp_int32, sub_cpt_rev, 32, -, fp, _Quad, 4i,
3186ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4u, kmp_uint32, sub_cpt_rev, 32, -, fp, _Quad,
3188ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4, kmp_int32, div_cpt_rev, 32, /, fp, _Quad, 4i,
3190ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4u, kmp_uint32, div_cpt_rev, 32, /, fp, _Quad,
3193ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8, kmp_int64, sub_cpt_rev, 64, -, fp, _Quad, 8i,
3196ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8u, kmp_uint64, sub_cpt_rev, 64, -, fp, _Quad,
3199ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8, kmp_int64, div_cpt_rev, 64, /, fp, _Quad, 8i,
3202ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8u, kmp_uint64, div_cpt_rev, 64, /, fp, _Quad,
3206ATOMIC_CMPXCHG_CPT_REV_MIX(float4, kmp_real32, sub_cpt_rev, 32, -, fp, _Quad,
3209ATOMIC_CMPXCHG_CPT_REV_MIX(float4, kmp_real32, div_cpt_rev, 32, /, fp, _Quad,
3213ATOMIC_CMPXCHG_CPT_REV_MIX(float8, kmp_real64, sub_cpt_rev, 64, -, fp, _Quad,
3216ATOMIC_CMPXCHG_CPT_REV_MIX(float8, kmp_real64, div_cpt_rev, 64, /, fp, _Quad,
3220ATOMIC_CRITICAL_CPT_REV_MIX(float10, long double, sub_cpt_rev, -, fp, _Quad,
3222ATOMIC_CRITICAL_CPT_REV_MIX(float10, long double, div_cpt_rev, /, fp, _Quad,
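// Editorial note (not in the original source): the *_MIX variants handle a
// right-hand side of a different type (here _Quad) than the atomic location,
// e.g. x = q / x with an integer x and a _Quad q. Assuming ATOMIC_BEGIN_CPT_MIX
// follows the ATOMIC_BEGIN_CPT pattern, the generated entry point carries the
// RTYPE_ID suffix (for example __kmpc_atomic_fixed1_sub_cpt_rev_fp) and takes
// an RTYPE (_Quad) rhs; the arithmetic is done in the wider rhs type and the
// result is converted back to the lhs type by the same cmpxchg or
// critical-section body used for the uniform-type routines.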
3229#define ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3230 TYPE __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3232 KMP_DEBUG_ASSERT(__kmp_init_serial); \
3233 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_swp: T#%d\n", gtid));
3235#define CRITICAL_SWP(LCK_ID) \
3236 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3238 old_value = (*lhs); \
3241 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3245#ifdef KMP_GOMP_COMPAT
3246#define GOMP_CRITICAL_SWP(FLAG) \
3247 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
3252#define GOMP_CRITICAL_SWP(FLAG)
3255#define ATOMIC_XCHG_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3256 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3258 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3259 old_value = KMP_XCHG_FIXED##BITS(lhs, rhs); \
3263#define ATOMIC_XCHG_FLOAT_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3264 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3266 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3267 old_value = KMP_XCHG_REAL##BITS(lhs, rhs); \
3272#define CMPXCHG_SWP(TYPE, BITS) \
3274 TYPE KMP_ATOMIC_VOLATILE temp_val; \
3275 TYPE old_value, new_value; \
3277 old_value = temp_val; \
3279 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
3280 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
3281 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
3283 old_value = temp_val; \
3290#define ATOMIC_CMPXCHG_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3291 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3294 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3295 CMPXCHG_SWP(TYPE, BITS) \
3298ATOMIC_XCHG_SWP(fixed1, kmp_int8, 8, KMP_ARCH_X86)
3299ATOMIC_XCHG_SWP(fixed2, kmp_int16, 16, KMP_ARCH_X86)
3300ATOMIC_XCHG_SWP(fixed4, kmp_int32, 32, KMP_ARCH_X86)
3302ATOMIC_XCHG_FLOAT_SWP(float4, kmp_real32, 32,
3306ATOMIC_CMPXCHG_SWP(fixed8, kmp_int64, 64,
3308ATOMIC_CMPXCHG_SWP(float8, kmp_real64, 64,
3311ATOMIC_XCHG_SWP(fixed8, kmp_int64, 64, KMP_ARCH_X86)
3312ATOMIC_XCHG_FLOAT_SWP(float8, kmp_real64, 64,
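// Editorial note (not in the original source): the _swp routines implement the
// plain capture-and-overwrite form { v = x; x = expr; } where expr does not
// read x. Small types use a native exchange (KMP_XCHG_FIXED*/KMP_XCHG_REAL*);
// widths without a usable native exchange fall back to the CMPXCHG_SWP retry
// loop. An illustrative lowering (the call shape follows ATOMIC_BEGIN_SWP;
// loc and gtid are placeholders):
//
//   // #pragma omp atomic capture
//   // { v = x; x = 42; }           // x, v are kmp_int32
//   v = __kmpc_atomic_fixed4_swp(&loc, gtid, &x, 42);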
3319#define ATOMIC_CRITICAL_SWP(TYPE_ID, TYPE, LCK_ID, GOMP_FLAG) \
3320 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3322 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3323 CRITICAL_SWP(LCK_ID) \
3331#define ATOMIC_BEGIN_SWP_WRK(TYPE_ID, TYPE) \
3332 void __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3333 TYPE rhs, TYPE *out) { \
3334 KMP_DEBUG_ASSERT(__kmp_init_serial); \
3335 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_swp: T#%d\n", gtid));
3337#define CRITICAL_SWP_WRK(LCK_ID) \
3338 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3343 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3347#ifdef KMP_GOMP_COMPAT
3348#define GOMP_CRITICAL_SWP_WRK(FLAG) \
3349 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
3351 CRITICAL_SWP_WRK(0); \
3354#define GOMP_CRITICAL_SWP_WRK(FLAG)
3358#define ATOMIC_CRITICAL_SWP_WRK(TYPE_ID, TYPE, LCK_ID, GOMP_FLAG) \
3359 ATOMIC_BEGIN_SWP_WRK(TYPE_ID, TYPE) \
3361 GOMP_CRITICAL_SWP_WRK(GOMP_FLAG) \
3362 CRITICAL_SWP_WRK(LCK_ID) \
3366ATOMIC_CRITICAL_SWP(float10, long double, 10r, 1)
3368ATOMIC_CRITICAL_SWP(float16, QUAD_LEGACY, 16r, 1)
3371ATOMIC_CRITICAL_SWP_WRK(cmplx4, kmp_cmplx32, 8c, 1)
3376ATOMIC_CRITICAL_SWP(cmplx8, kmp_cmplx64, 16c, 1)
3377ATOMIC_CRITICAL_SWP(cmplx10, kmp_cmplx80, 20c, 1)
3379ATOMIC_CRITICAL_SWP(cmplx16, CPLX128_LEG, 32c, 1)
3381ATOMIC_CRITICAL_SWP(float16_a16, Quad_a16_t, 16r,
3383ATOMIC_CRITICAL_SWP(cmplx16_a16, kmp_cmplx128_a16_t, 32c,
3397void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3398 void (*f)(void *, void *, void *)) {
3399 KMP_DEBUG_ASSERT(__kmp_init_serial);
3402#if KMP_ARCH_X86 && defined(KMP_GOMP_COMPAT)
3408 kmp_int8 old_value, new_value;
3410 old_value = *(kmp_int8 *)lhs;
3411 (*f)(&new_value, &old_value, rhs);
3414 while (!KMP_COMPARE_AND_STORE_ACQ8((kmp_int8 *)lhs, *(kmp_int8 *)&old_value,
3415 *(kmp_int8 *)&new_value)) {
3418 old_value = *(kmp_int8 *)lhs;
3419 (*f)(&new_value, &old_value, rhs);
3426#ifdef KMP_GOMP_COMPAT
3427 if (__kmp_atomic_mode == 2) {
3428 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3431 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_1i, gtid);
3433 (*f)(lhs, lhs, rhs);
3435#ifdef KMP_GOMP_COMPAT
3436 if (__kmp_atomic_mode == 2) {
3437 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3440 __kmp_release_atomic_lock(&__kmp_atomic_lock_1i, gtid);
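// Editorial note (not in the original source): __kmpc_atomic_1/2/4/8 are the
// generic fallbacks for 1/2/4/8-byte operands whose operation has no dedicated
// typed entry point. The caller passes the update as a callback f(out, a, b)
// that stores a OP b into *out; when a compare-and-store of the right width is
// usable (and, for the 2/4/8-byte versions, when lhs is suitably aligned) the
// routine retries f inside a CAS loop, otherwise it serializes on the per-size
// atomic lock. An illustrative caller (the callback is hypothetical):
//
//   static void byte_xor(void *out, void *a, void *b) {
//     *(kmp_int8 *)out = (kmp_int8)(*(kmp_int8 *)a ^ *(kmp_int8 *)b);
//   }
//   ...
//   __kmpc_atomic_1(&loc, gtid, &x, &y, byte_xor);   // x ^= y, atomically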
3444void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3445 void (*f)(void *, void *, void *)) {
3447#if KMP_ARCH_X86 && defined(KMP_GOMP_COMPAT)
3449#elif KMP_ARCH_X86 || KMP_ARCH_X86_64
3452 !((kmp_uintptr_t)lhs & 0x1)
3455 kmp_int16 old_value, new_value;
3457 old_value = *(kmp_int16 *)lhs;
3458 (*f)(&new_value, &old_value, rhs);
3461 while (!KMP_COMPARE_AND_STORE_ACQ16(
3462 (kmp_int16 *)lhs, *(kmp_int16 *)&old_value, *(kmp_int16 *)&new_value)) {
3465 old_value = *(kmp_int16 *)lhs;
3466 (*f)(&new_value, &old_value, rhs);
3473#ifdef KMP_GOMP_COMPAT
3474 if (__kmp_atomic_mode == 2) {
3475 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3478 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_2i, gtid);
3480 (*f)(lhs, lhs, rhs);
3482#ifdef KMP_GOMP_COMPAT
3483 if (__kmp_atomic_mode == 2) {
3484 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3487 __kmp_release_atomic_lock(&__kmp_atomic_lock_2i, gtid);
3491void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3492 void (*f)(void *, void *, void *)) {
3493 KMP_DEBUG_ASSERT(__kmp_init_serial);
3498#if KMP_ARCH_X86 || KMP_ARCH_X86_64
3501 !((kmp_uintptr_t)lhs & 0x3)
3504 kmp_int32 old_value, new_value;
3506 old_value = *(kmp_int32 *)lhs;
3507 (*f)(&new_value, &old_value, rhs);
3510 while (!KMP_COMPARE_AND_STORE_ACQ32(
3511 (kmp_int32 *)lhs, *(kmp_int32 *)&old_value, *(kmp_int32 *)&new_value)) {
3514 old_value = *(kmp_int32 *)lhs;
3515 (*f)(&new_value, &old_value, rhs);
3523#ifdef KMP_GOMP_COMPAT
3524 if (__kmp_atomic_mode == 2) {
3525 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3528 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_4i, gtid);
3530 (*f)(lhs, lhs, rhs);
3532#ifdef KMP_GOMP_COMPAT
3533 if (__kmp_atomic_mode == 2) {
3534 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3537 __kmp_release_atomic_lock(&__kmp_atomic_lock_4i, gtid);
3541void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3542 void (*f)(void *, void *, void *)) {
3543 KMP_DEBUG_ASSERT(__kmp_init_serial);
3546#if KMP_ARCH_X86 && defined(KMP_GOMP_COMPAT)
3548#elif KMP_ARCH_X86 || KMP_ARCH_X86_64
3551 !((kmp_uintptr_t)lhs & 0x7)
3554 kmp_int64 old_value, new_value;
3556 old_value = *(kmp_int64 *)lhs;
3557 (*f)(&new_value, &old_value, rhs);
3559 while (!KMP_COMPARE_AND_STORE_ACQ64(
3560 (kmp_int64 *)lhs, *(kmp_int64 *)&old_value, *(kmp_int64 *)&new_value)) {
3563 old_value = *(kmp_int64 *)lhs;
3564 (*f)(&new_value, &old_value, rhs);
3572#ifdef KMP_GOMP_COMPAT
3573 if (__kmp_atomic_mode == 2) {
3574 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3577 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_8i, gtid);
3579 (*f)(lhs, lhs, rhs);
3581#ifdef KMP_GOMP_COMPAT
3582 if (__kmp_atomic_mode == 2) {
3583 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3586 __kmp_release_atomic_lock(&__kmp_atomic_lock_8i, gtid);
3590void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3591 void (*f)(void *, void *, void *)) {
3592 KMP_DEBUG_ASSERT(__kmp_init_serial);
3594#ifdef KMP_GOMP_COMPAT
3595 if (__kmp_atomic_mode == 2) {
3596 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3599 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_10r, gtid);
3601 (*f)(lhs, lhs, rhs);
3603#ifdef KMP_GOMP_COMPAT
3604 if (__kmp_atomic_mode == 2) {
3605 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3608 __kmp_release_atomic_lock(&__kmp_atomic_lock_10r, gtid);
3611void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3612 void (*f)(void *, void *, void *)) {
3613 KMP_DEBUG_ASSERT(__kmp_init_serial);
3615#ifdef KMP_GOMP_COMPAT
3616 if (__kmp_atomic_mode == 2) {
3617 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3620 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_16c, gtid);
3622 (*f)(lhs, lhs, rhs);
3624#ifdef KMP_GOMP_COMPAT
3625 if (__kmp_atomic_mode == 2) {
3626 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3629 __kmp_release_atomic_lock(&__kmp_atomic_lock_16c, gtid);
3632void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3633 void (*f)(void *, void *, void *)) {
3634 KMP_DEBUG_ASSERT(__kmp_init_serial);
3636#ifdef KMP_GOMP_COMPAT
3637 if (__kmp_atomic_mode == 2) {
3638 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3641 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_20c, gtid);
3643 (*f)(lhs, lhs, rhs);
3645#ifdef KMP_GOMP_COMPAT
3646 if (__kmp_atomic_mode == 2) {
3647 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3650 __kmp_release_atomic_lock(&__kmp_atomic_lock_20c, gtid);
3653void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3654 void (*f)(void *, void *, void *)) {
3655 KMP_DEBUG_ASSERT(__kmp_init_serial);
3657#ifdef KMP_GOMP_COMPAT
3658 if (__kmp_atomic_mode == 2) {
3659 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3662 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_32c, gtid);
3664 (*f)(lhs, lhs, rhs);
3666#ifdef KMP_GOMP_COMPAT
3667 if (__kmp_atomic_mode == 2) {
3668 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3671 __kmp_release_atomic_lock(&__kmp_atomic_lock_32c, gtid);
3677void __kmpc_atomic_start(void) {
3678 int gtid = __kmp_entry_gtid();
3679 KA_TRACE(20, ("__kmpc_atomic_start: T#%d\n", gtid));
3680 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3683void __kmpc_atomic_end(void) {
3684 int gtid = __kmp_get_gtid();
3685 KA_TRACE(20, ("__kmpc_atomic_end: T#%d\n", gtid));
3686 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
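// Editorial note (not in the original source): __kmpc_atomic_start/_end
// bracket an atomic update that the compiler could not map to any of the typed
// entry points above; the whole update simply runs under the global
// __kmp_atomic_lock. An illustrative emitted sequence:
//
//   __kmpc_atomic_start();
//   x = f(x, y);          // arbitrary update with no dedicated runtime routine
//   __kmpc_atomic_end();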