#if (KMP_ARCH_X86 || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_ARM)
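/* The helpers below emulate atomic fetch-and-OR / fetch-and-AND (and, further
   down, fetch-and-add) with a compare-and-store retry loop: read the old
   value, compute the new one, and retry the CAS until it succeeds. */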
kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 d) {
  kmp_int8 old_value, new_value;

  old_value = TCR_1(*p);
  new_value = old_value | d;

  while (!KMP_COMPARE_AND_STORE_REL8(p, old_value, new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_1(*p);
    new_value = old_value | d;
  }
  return old_value;
}

kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 d) {
  kmp_int8 old_value, new_value;

  old_value = TCR_1(*p);
  new_value = old_value & d;

  while (!KMP_COMPARE_AND_STORE_REL8(p, old_value, new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_1(*p);
    new_value = old_value & d;
  }
  return old_value;
}

kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 d) {
  kmp_uint32 old_value, new_value;

  old_value = TCR_4(*p);
  new_value = old_value | d;

  while (!KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)p, old_value,
                                      new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_4(*p);
    new_value = old_value | d;
  }
  return old_value;
}

kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 d) {
  kmp_uint32 old_value, new_value;

  old_value = TCR_4(*p);
  new_value = old_value & d;

  while (!KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)p, old_value,
                                      new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_4(*p);
    new_value = old_value & d;
  }
  return old_value;
}

#if KMP_ARCH_X86 || KMP_ARCH_X86_64
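/* 8-bit and 64-bit fetch-and-add, again built on a compare-and-store retry
   loop rather than a dedicated atomic add-exchange instruction. */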
kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 d) {
  kmp_int64 old_value, new_value;

  old_value = TCR_1(*p);
  new_value = old_value + d;
  while (!__kmp_compare_and_store8(p, old_value, new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_1(*p);
    new_value = old_value + d;
  }
  return old_value;
}

kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 d) {
  kmp_int64 old_value, new_value;

  old_value = TCR_8(*p);
  new_value = old_value + d;
  while (!__kmp_compare_and_store64(p, old_value, new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_8(*p);
    new_value = old_value + d;
  }
  return old_value;
}
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 d) {
  kmp_uint64 old_value, new_value;

  old_value = TCR_8(*p);
  new_value = old_value | d;
  while (!KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)p, old_value,
                                      new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_8(*p);
    new_value = old_value | d;
  }
  return old_value;
}

kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 d) {
  kmp_uint64 old_value, new_value;

  old_value = TCR_8(*p);
  new_value = old_value & d;
  while (!KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)p, old_value,
                                      new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_8(*p);
    new_value = old_value & d;
  }
  return old_value;
}

#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_ARM */

#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC
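/* Plain-C __kmp_invoke_microtask for MSVC on AArch64, where the inline-assembly
   implementation used elsewhere is not available: dispatch on argc, pass &gtid,
   &tid and up to six user arguments in registers, and stage any remaining
   arguments on the stack (see the default case below). */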
int __kmp_invoke_microtask(microtask_t pkfn, int gtid, int tid, int argc,
                           void *p_argv[]
#if OMPT_SUPPORT
                           ,
                           void **exit_frame_ptr
#endif
) {
#if OMPT_SUPPORT
  *exit_frame_ptr = OMPT_GET_FRAME_ADDRESS(0);
#endif

  switch (argc) {
  case 0:
    (*pkfn)(&gtid, &tid);
    break;
  case 1:
    (*pkfn)(&gtid, &tid, p_argv[0]);
    break;
  case 2:
    (*pkfn)(&gtid, &tid, p_argv[0], p_argv[1]);
    break;
  case 3:
    (*pkfn)(&gtid, &tid, p_argv[0], p_argv[1], p_argv[2]);
    break;
  case 4:
    (*pkfn)(&gtid, &tid, p_argv[0], p_argv[1], p_argv[2], p_argv[3]);
    break;
  case 5:
    (*pkfn)(&gtid, &tid, p_argv[0], p_argv[1], p_argv[2], p_argv[3], p_argv[4]);
    break;
  default: {
    // p_argv[6] and beyond do not fit in registers; stage them on the stack.
    size_t len = (argc - 6) * sizeof(void *);
    void *argbuf = alloca(len);
    memcpy(argbuf, &p_argv[6], len);
  }
    KMP_FALLTHROUGH();
  case 6:
    (*pkfn)(&gtid, &tid, p_argv[0], p_argv[1], p_argv[2], p_argv[3], p_argv[4],
            p_argv[5]);