20 #include "ompt-specific.h" 32 #if defined(__cplusplus) && (KMP_OS_WINDOWS) 39 #define KMP_DO_ALIGN(alignment) 42 #if defined(_MSC_VER) && (_MSC_VER < 1600) && defined(_DEBUG) 47 #define _DEBUG_TEMPORARILY_UNSET_ 52 template <
typename type_lhs,
typename type_rhs>
53 std::complex<type_lhs> __kmp_lhs_div_rhs(
const std::complex<type_lhs> &lhs,
54 const std::complex<type_rhs> &rhs) {
55 type_lhs a = lhs.real();
56 type_lhs b = lhs.imag();
57 type_rhs c = rhs.real();
58 type_rhs d = rhs.imag();
59 type_rhs den = c * c + d * d;
60 type_rhs r = (a * c + b * d);
61 type_rhs i = (b * c - a * d);
62 std::complex<type_lhs> ret(r / den, i / den);
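// Worked example (illustration only, not part of the original header): for
// lhs = 1+2i and rhs = 3+4i the helper computes den = 3*3 + 4*4 = 25,
// r = 1*3 + 2*4 = 11 and i = 2*3 - 1*4 = 2, i.e. the textbook formula
// (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c*c + d*d), giving 0.44 + 0.08i.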
// Complex-double wrapper whose division goes through __kmp_lhs_div_rhs.
struct __kmp_cmplx64_t : std::complex<double> {

  __kmp_cmplx64_t() : std::complex<double>() {}

  __kmp_cmplx64_t(const std::complex<double> &cd) : std::complex<double>(cd) {}

  void operator/=(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx64_t operator/(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx64_t kmp_cmplx64;
// Complex-float wrapper; also provides mixed float/double-complex operators.
struct __kmp_cmplx32_t : std::complex<float> {

  __kmp_cmplx32_t() : std::complex<float>() {}

  __kmp_cmplx32_t(const std::complex<float> &cf) : std::complex<float>(cf) {}

  __kmp_cmplx32_t operator+(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs + rhs);
  }
  __kmp_cmplx32_t operator-(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs - rhs);
  }
  __kmp_cmplx32_t operator*(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs * rhs);
  }

  __kmp_cmplx32_t operator+(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) + b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator-(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) - b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator*(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) * b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }

  void operator/=(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }

  void operator/=(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx32_t kmp_cmplx32;
struct KMP_DO_ALIGN(16) __kmp_cmplx80_t : std::complex<long double> {

  __kmp_cmplx80_t() : std::complex<long double>() {}

  __kmp_cmplx80_t(const std::complex<long double> &cld)
      : std::complex<long double>(cld) {}

  void operator/=(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx80_t operator/(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef KMP_DO_ALIGN(16) struct __kmp_cmplx80_t kmp_cmplx80;
struct __kmp_cmplx128_t : std::complex<_Quad> {

  __kmp_cmplx128_t() : std::complex<_Quad>() {}

  __kmp_cmplx128_t(const std::complex<_Quad> &cq) : std::complex<_Quad>(cq) {}

  void operator/=(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx128_t operator/(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx128_t kmp_cmplx128;

#ifdef _DEBUG_TEMPORARILY_UNSET_
#undef _DEBUG_TEMPORARILY_UNSET_
// restore the original _DEBUG setting
#define _DEBUG
#endif

#else
// Non-Windows builds use the compiler-supported _Complex types directly.
typedef float _Complex kmp_cmplx32;
typedef double _Complex kmp_cmplx64;
typedef long double _Complex kmp_cmplx80;
typedef _Quad _Complex kmp_cmplx128;
#endif
#if KMP_ARCH_X86 && KMP_HAVE_QUAD

// Quad and complex-quad structures with 4-byte alignment (32-bit x86 ABI).
#pragma pack(push, 4)

struct KMP_DO_ALIGN(4) Quad_a4_t {
  _Quad q;

  Quad_a4_t() : q() {}
  Quad_a4_t(const _Quad &cq) : q(cq) {}

  Quad_a4_t operator+(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs + rhs);
  }
  Quad_a4_t operator-(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs - rhs);
  }
  Quad_a4_t operator*(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs * rhs);
  }
  Quad_a4_t operator/(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs / rhs);
  }
};

struct KMP_DO_ALIGN(4) kmp_cmplx128_a4_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a4_t() : q() {}
  kmp_cmplx128_a4_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a4_t operator+(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs + rhs);
  }
  kmp_cmplx128_a4_t operator-(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs - rhs);
  }
  kmp_cmplx128_a4_t operator*(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs * rhs);
  }
  kmp_cmplx128_a4_t operator/(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs / rhs);
  }
};

#pragma pack(pop)
// 16-byte aligned variants of the same quad types.
struct KMP_DO_ALIGN(16) Quad_a16_t {
  _Quad q;

  Quad_a16_t() : q() {}
  Quad_a16_t(const _Quad &cq) : q(cq) {}

  Quad_a16_t operator+(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs + rhs);
  }
  Quad_a16_t operator-(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs - rhs);
  }
  Quad_a16_t operator*(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs * rhs);
  }
  Quad_a16_t operator/(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs / rhs);
  }
};

struct KMP_DO_ALIGN(16) kmp_cmplx128_a16_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a16_t() : q() {}
  kmp_cmplx128_a16_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a16_t operator+(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs + rhs);
  }
  kmp_cmplx128_a16_t operator-(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs - rhs);
  }
  kmp_cmplx128_a16_t operator*(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs * rhs);
  }
  kmp_cmplx128_a16_t operator/(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs / rhs);
  }
};

#endif // KMP_ARCH_X86 && KMP_HAVE_QUAD

#if KMP_ARCH_X86
#define QUAD_LEGACY Quad_a4_t
#define CPLX128_LEG kmp_cmplx128_a4_t
#else
#define QUAD_LEGACY _Quad
#define CPLX128_LEG kmp_cmplx128
#endif

extern int __kmp_atomic_mode;
// Atomic locks are implemented on top of the queuing lock.
typedef kmp_queuing_lock_t kmp_atomic_lock_t;

static inline void __kmp_acquire_atomic_lock(kmp_atomic_lock_t *lck, kmp_int32 gtid) {
#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_acquire) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_acquire)(
        ompt_mutex_atomic, 0, kmp_mutex_impl_queuing,
        (ompt_wait_id_t)(uintptr_t)lck, OMPT_GET_RETURN_ADDRESS(0));
  }
#endif

  __kmp_acquire_queuing_lock(lck, gtid);

#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_acquired) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_acquired)(
        ompt_mutex_atomic, (ompt_wait_id_t)(uintptr_t)lck,
        OMPT_GET_RETURN_ADDRESS(0));
  }
#endif
}

static inline int __kmp_test_atomic_lock(kmp_atomic_lock_t *lck, kmp_int32 gtid) {
  return __kmp_test_queuing_lock(lck, gtid);
}

static inline void __kmp_release_atomic_lock(kmp_atomic_lock_t *lck, kmp_int32 gtid) {
  __kmp_release_queuing_lock(lck, gtid);
#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_released) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_released)(
        ompt_mutex_atomic, (ompt_wait_id_t)(uintptr_t)lck,
        OMPT_GET_RETURN_ADDRESS(0));
  }
#endif
}

static inline void __kmp_init_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_init_queuing_lock(lck);
}

static inline void __kmp_destroy_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_destroy_queuing_lock(lck);
}
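// Usage sketch (illustration only; the real callers are in kmp_atomic.cpp):
// a non-lock-free update takes the matching lock around the read-modify-write:
//   __kmp_acquire_atomic_lock(&__kmp_atomic_lock_10r, gtid);
//   *lhs = *lhs + rhs; // e.g. a long double update
//   __kmp_release_atomic_lock(&__kmp_atomic_lock_10r, gtid);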
// Global locks used by the atomic routines, one per operand size / type class.
extern kmp_atomic_lock_t __kmp_atomic_lock; /* user-coded atomics (Gnu compat mode) */
extern kmp_atomic_lock_t __kmp_atomic_lock_1i; /* 1-byte fixed */
extern kmp_atomic_lock_t __kmp_atomic_lock_2i; /* 2-byte fixed */
extern kmp_atomic_lock_t __kmp_atomic_lock_4i; /* 4-byte fixed */
extern kmp_atomic_lock_t __kmp_atomic_lock_4r; /* 4-byte float */
extern kmp_atomic_lock_t __kmp_atomic_lock_8i; /* 8-byte fixed */
extern kmp_atomic_lock_t __kmp_atomic_lock_8r; /* 8-byte float */
extern kmp_atomic_lock_t __kmp_atomic_lock_8c; /* 8-byte complex (float) */
extern kmp_atomic_lock_t __kmp_atomic_lock_10r; /* long double */
extern kmp_atomic_lock_t __kmp_atomic_lock_16r; /* _Quad */
extern kmp_atomic_lock_t __kmp_atomic_lock_16c; /* 16-byte complex (double) */
extern kmp_atomic_lock_t __kmp_atomic_lock_20c; /* 20-byte complex (long double) */
extern kmp_atomic_lock_t __kmp_atomic_lock_32c; /* 32-byte complex (_Quad) */
// ATOMIC entry points: one routine per operation and operand type,
// void __kmpc_atomic_<type>_<op>(ident_t *id_ref, int gtid, TYPE *lhs, TYPE rhs);

// 1-byte fixed
void __kmpc_atomic_fixed1_add(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_andb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_mul(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_sub(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_xor(ident_t *id_ref, int gtid, char *lhs, char rhs);
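// Example of intended use (sketch; the exact lowering is compiler-dependent):
//   char x; ...
//   #pragma omp atomic
//   x += y;
// is lowered to a call such as
//   __kmpc_atomic_fixed1_add(&loc, gtid, &x, y);
// where 'loc' identifies the source location and 'gtid' is the global thread id.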
// 2-byte fixed
void __kmpc_atomic_fixed2_add(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_andb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_div(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_mul(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_sub(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_xor(ident_t *id_ref, int gtid, short *lhs, short rhs);
// 4- and 8-byte add / sub
void __kmpc_atomic_fixed4_add(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_sub(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_float4_add(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_sub(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_fixed8_add(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_sub(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float8_add(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_sub(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
// 4-byte fixed, other operators
void __kmpc_atomic_fixed4_andb(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_div(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_mul(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_orb(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_xor(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
// 8-byte fixed, other operators
void __kmpc_atomic_fixed8_andb(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_div(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_mul(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_orb(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_xor(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
// 4- and 8-byte float div / mul
void __kmpc_atomic_float4_div(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_mul(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_div(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_mul(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
// logical AND / OR
void __kmpc_atomic_fixed1_andl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_andl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_andl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_orl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_andl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_orl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
// MIN / MAX
void __kmpc_atomic_fixed1_max(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_min(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_max(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_min(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_max(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_min(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_max(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_min(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_max(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_min(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_max(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_min(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float16_max(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_min(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_max_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_min_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
// .NEQV. / .EQV.
void __kmpc_atomic_fixed1_neqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_neqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_neqv(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_neqv(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed1_eqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_eqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_eqv(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_eqv(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
// long double and quad-precision
void __kmpc_atomic_float10_add(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_sub(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_mul(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_div(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float16_add(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_sub(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_mul(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_add_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_sub_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_mul_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
// complex types
void __kmpc_atomic_cmplx4_add(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_sub(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_mul(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_add(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_sub(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_mul(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_add(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_sub(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_mul(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx16_add(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_sub(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_mul(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_add_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_mul_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
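// The "_rev" entry points below handle the reversed forms in which the shared
// variable is the right operand, e.g. (sketch; lowering is compiler-dependent):
//   #pragma omp atomic
//   x = expr - x;   // maps onto __kmpc_atomic_float8_sub_rev(&loc, gtid, &x, expr)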
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

void __kmpc_atomic_fixed1_sub_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_shl_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed2_sub_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_div_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_shl_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed4_sub_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_div_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_shl_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed8_sub_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_div_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_shl_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_float4_sub_rev(ident_t *id_ref, int gtid, float *lhs, float rhs);
void __kmpc_atomic_float4_div_rev(ident_t *id_ref, int gtid, float *lhs, float rhs);
void __kmpc_atomic_float8_sub_rev(ident_t *id_ref, int gtid, double *lhs, double rhs);
void __kmpc_atomic_float8_div_rev(ident_t *id_ref, int gtid, double *lhs, double rhs);
void __kmpc_atomic_float10_sub_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_div_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_float16_sub_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
#endif
void __kmpc_atomic_cmplx4_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_cmplx16_sub_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
#if KMP_ARCH_X86
void __kmpc_atomic_float16_sub_a16_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD

#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64
// routines mixing an integer/float lhs with a double (float8) rhs
void __kmpc_atomic_fixed1_mul_float8(ident_t *id_ref, int gtid, char *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed1_div_float8(ident_t *id_ref, int gtid, char *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed2_mul_float8(ident_t *id_ref, int gtid, short *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed2_div_float8(ident_t *id_ref, int gtid, short *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_mul_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_div_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed8_mul_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed8_div_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_add_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_sub_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_mul_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_div_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
#if KMP_HAVE_QUAD
// routines with a _Quad rhs and a smaller lhs
void __kmpc_atomic_fixed1_add_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_add_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_sub_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_sub_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_mul_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_mul_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_div_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_add_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_add_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_sub_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_mul_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_mul_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_div_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_add_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_add_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_sub_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_mul_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_mul_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_div_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_add_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_add_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_sub_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_mul_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_mul_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_div_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_float4_add_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_mul_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_div_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float8_add_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_sub_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_mul_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_div_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float10_add_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_sub_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_mul_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);

// reversed variants with a _Quad rhs
void __kmpc_atomic_fixed1_sub_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_sub_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_div_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_sub_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_div_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_sub_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_div_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_sub_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_div_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs);
void __kmpc_atomic_float4_div_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs);
void __kmpc_atomic_float8_sub_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs);
void __kmpc_atomic_float8_div_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs);
void __kmpc_atomic_float10_sub_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
#endif // KMP_HAVE_QUAD

void __kmpc_atomic_cmplx4_add_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_sub_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_mul_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_div_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
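// The generic __kmpc_atomic_<N> entry points apply a caller-supplied callback
// to an N-byte location under the runtime's atomic protection. Minimal sketch
// (illustrative only; the callback name and its argument convention of
// (destination, lhs, rhs) are assumptions, not part of this header):
//   static void add_i64(void *dst, void *a, void *b) {
//     *(kmp_int64 *)dst = *(kmp_int64 *)a + *(kmp_int64 *)b;
//   }
//   __kmpc_atomic_8(&loc, gtid, &x, &y, add_i64); // atomically x = x + y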
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

// atomic read routines
char __kmpc_atomic_fixed1_rd(ident_t *id_ref, int gtid, char *loc);
short __kmpc_atomic_fixed2_rd(ident_t *id_ref, int gtid, short *loc);
kmp_int32 __kmpc_atomic_fixed4_rd(ident_t *id_ref, int gtid, kmp_int32 *loc);
kmp_int64 __kmpc_atomic_fixed8_rd(ident_t *id_ref, int gtid, kmp_int64 *loc);
kmp_real32 __kmpc_atomic_float4_rd(ident_t *id_ref, int gtid, kmp_real32 *loc);
kmp_real64 __kmpc_atomic_float8_rd(ident_t *id_ref, int gtid, kmp_real64 *loc);
long double __kmpc_atomic_float10_rd(ident_t *id_ref, int gtid, long double *loc);
QUAD_LEGACY __kmpc_atomic_float16_rd(ident_t *id_ref, int gtid, QUAD_LEGACY *loc);
#if (KMP_OS_WINDOWS)
// on Windows the cmplx4 result is returned through an out parameter
void __kmpc_atomic_cmplx4_rd(kmp_cmplx32 *out, ident_t *id_ref, int gtid, kmp_cmplx32 *loc);
#else
kmp_cmplx32 __kmpc_atomic_cmplx4_rd(ident_t *id_ref, int gtid, kmp_cmplx32 *loc);
#endif
kmp_cmplx64 __kmpc_atomic_cmplx8_rd(ident_t *id_ref, int gtid, kmp_cmplx64 *loc);
kmp_cmplx80 __kmpc_atomic_cmplx10_rd(ident_t *id_ref, int gtid, kmp_cmplx80 *loc);
CPLX128_LEG __kmpc_atomic_cmplx16_rd(ident_t *id_ref, int gtid, CPLX128_LEG *loc);
Quad_a16_t __kmpc_atomic_float16_a16_rd(ident_t *id_ref, int gtid, Quad_a16_t *loc);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *loc);
// atomic write routines
void __kmpc_atomic_fixed1_wr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_wr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_wr(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_wr(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_wr(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_wr(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float10_wr(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float16_wr(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_cmplx4_wr(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_wr(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_wr(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx16_wr(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_float16_a16_wr(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_a16_wr(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
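// The "_cpt" (capture) routines below update the location and also return a
// value; the trailing 'flag' selects whether the captured value is taken
// before or after the update. Sketch of the intent (illustration only):
//   v = __kmpc_atomic_fixed4_add_cpt(&loc, gtid, &x, y, 1); // v = (x += y)
//   v = __kmpc_atomic_fixed4_add_cpt(&loc, gtid, &x, y, 0); // v = x; x += y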
char __kmpc_atomic_fixed1_add_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_andb_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_orb_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_xor_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_add_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_andb_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_orb_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_xor_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_add_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_add_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_sub_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_add_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_add_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_sub_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_andb_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orb_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_xor_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_andb_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orb_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_xor_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_div_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_mul_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_div_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_mul_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
char __kmpc_atomic_fixed1_andl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_orl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_andl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_orl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_andl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_andl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
char __kmpc_atomic_fixed1_max_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_min_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_max_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_min_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_max_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_min_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_max_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_min_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_max_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_min_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_max_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_min_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_max_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_min_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
char __kmpc_atomic_fixed1_neqv_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_neqv_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
char __kmpc_atomic_fixed1_eqv_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_eqv_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
long double __kmpc_atomic_float10_add_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_mul_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_add_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
// cmplx4 capture routines return the result through the 'out' parameter
void __kmpc_atomic_cmplx4_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
void __kmpc_atomic_start(void);
void __kmpc_atomic_end(void);
// capture routines for the reversed forms (lhs = rhs OP lhs)
char __kmpc_atomic_fixed1_sub_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_rev(ident_t *id_ref, int gtid, float *lhs, float rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev(ident_t *id_ref, int gtid, float *lhs, float rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev(ident_t *id_ref, int gtid, double *lhs, double rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev(ident_t *id_ref, int gtid, double *lhs, double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
void __kmpc_atomic_cmplx4_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
// atomic swap routines: store rhs and return the previous value
char __kmpc_atomic_fixed1_swp(ident_t *id_ref, int gtid, char *lhs, char rhs);
short __kmpc_atomic_fixed2_swp(ident_t *id_ref, int gtid, short *lhs, short rhs);
kmp_int32 __kmpc_atomic_fixed4_swp(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
kmp_int64 __kmpc_atomic_fixed8_swp(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
float __kmpc_atomic_float4_swp(ident_t *id_ref, int gtid, float *lhs, float rhs);
double __kmpc_atomic_float8_swp(ident_t *id_ref, int gtid, double *lhs, double rhs);
long double __kmpc_atomic_float10_swp(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
QUAD_LEGACY __kmpc_atomic_float16_swp(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_cmplx4_swp(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out);
kmp_cmplx64 __kmpc_atomic_cmplx8_swp(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
kmp_cmplx80 __kmpc_atomic_cmplx10_swp(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
CPLX128_LEG __kmpc_atomic_cmplx16_swp(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
Quad_a16_t __kmpc_atomic_float16_a16_swp(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
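// Usage sketch (illustration only): the OpenMP capture form
//   #pragma omp atomic capture
//   { v = x; x = expr; }
// maps, for a 4-byte integer x, onto a call such as
//   v = __kmpc_atomic_fixed4_swp(&loc, gtid, &x, expr);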
#if KMP_HAVE_QUAD
// capture routines with a _Quad rhs
char __kmpc_atomic_fixed1_add_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_add_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_mul_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_add_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_add_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_mul_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_add_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_add_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_add_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_add_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_add_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_add_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_add_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_mul_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);

// reversed capture routines with a _Quad rhs
char __kmpc_atomic_fixed1_sub_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
#endif // KMP_HAVE_QUAD

#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64