#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
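/* These values select how the Fortran entry points are mangled: plain
   ("omp_get_num_threads"), with an appended underscore
   ("omp_get_num_threads_"), or upper case ("OMP_GET_NUM_THREADS"). */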
#define KMP_PTR_SKIP (sizeof(void*))

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1
#ifndef KMP_MEM_CONS_MODEL
# define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif
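/* KMP_MEM_CONS_MODEL picks how the runtime enforces memory consistency:
   by relying on volatile accesses or by issuing explicit fences. Builds may
   override it on the command line; the volatile model is the default. */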
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#if defined( __INTEL_COMPILER )
# undef KMP_COMPILER_ICC
# define KMP_COMPILER_ICC 1
#elif defined( __clang__ )
# undef KMP_COMPILER_CLANG
# define KMP_COMPILER_CLANG 1
#elif defined( __GNUC__ )
# undef KMP_COMPILER_GCC
# define KMP_COMPILER_GCC 1
#elif defined( _MSC_VER )
# undef KMP_COMPILER_MSVC
# define KMP_COMPILER_MSVC 1
#else
# error Unknown compiler
#endif
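/* After this chain exactly one KMP_COMPILER_* macro is 1, so later code can
   branch on the detected compiler with plain #if tests. The order matters:
   icc and clang both define __GNUC__, so they must be checked first. */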
#include "kmp_platform.h"
#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK && !KMP_ARCH_PPC64
# define KMP_AFFINITY_SUPPORTED 1
# if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#  define KMP_GROUP_AFFINITY 1
# else
#  define KMP_GROUP_AFFINITY 0
# endif
#else
# define KMP_AFFINITY_SUPPORTED 0
# define KMP_GROUP_AFFINITY 0
#endif
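/* KMP_GROUP_AFFINITY covers Windows processor groups, which are needed to
   address more than 64 logical CPUs on 64-bit Windows. */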
/* Check to see if we are using a compiler that supports __float128 */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
# if KMP_COMPILER_ICC
   /* _Quad is already defined for icc */
#  undef KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# elif KMP_COMPILER_CLANG
   /* Clang doesn't support a software-implemented 128-bit extended precision type yet */
   typedef long double _Quad;
# elif KMP_COMPILER_GCC
   typedef __float128 _Quad;
#  undef KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# elif KMP_COMPILER_MSVC
   typedef long double _Quad;
# endif
#else
# if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
   typedef long double _Quad;
#  undef KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# endif
#endif
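/* _Quad is the 128-bit floating point type used by the quad-precision entry
   points. Per the logic above, only icc and gcc provide a genuine 128-bit
   type; clang and MSVC fall back to long double, so KMP_HAVE_QUAD stays 0
   there. */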
#if KMP_OS_WINDOWS
  typedef char kmp_int8;
  typedef unsigned char kmp_uint8;
  typedef short kmp_int16;
  typedef unsigned short kmp_uint16;
  typedef int kmp_int32;
  typedef unsigned int kmp_uint32;
# define KMP_INT32_SPEC "d"
# define KMP_UINT32_SPEC "u"
# ifndef KMP_STRUCT64
   typedef __int64 kmp_int64;
   typedef unsigned __int64 kmp_uint64;
   #define KMP_INT64_SPEC "I64d"
   #define KMP_UINT64_SPEC "I64u"
# else
   struct kmp_struct64 {
     kmp_int32 a, b;
   };
   typedef struct kmp_struct64 kmp_int64;
   typedef struct kmp_struct64 kmp_uint64;
# endif
# define KMP_INTPTR 1
  typedef __int64 kmp_intptr_t;
  typedef unsigned __int64 kmp_uintptr_t;
# define KMP_INTPTR_SPEC "I64d"
# define KMP_UINTPTR_SPEC "I64u"
#endif /* KMP_OS_WINDOWS */
#if KMP_OS_UNIX
  typedef char kmp_int8;
  typedef unsigned char kmp_uint8;
  typedef short kmp_int16;
  typedef unsigned short kmp_uint16;
  typedef int kmp_int32;
  typedef unsigned int kmp_uint32;
  typedef long long kmp_int64;
  typedef unsigned long long kmp_uint64;
# define KMP_INT32_SPEC "d"
# define KMP_UINT32_SPEC "u"
# define KMP_INT64_SPEC "lld"
# define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */
#if KMP_ARCH_X86 || KMP_ARCH_ARM
# define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64
# define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
# error "Can't determine size_t printf format specifier."
#endif
#if KMP_ARCH_X86 || KMP_ARCH_ARM
# define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
# define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif
typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;
#ifndef KMP_INTPTR
# define KMP_INTPTR 1
  typedef long kmp_intptr_t;
  typedef unsigned long kmp_uintptr_t;
# define KMP_INTPTR_SPEC "ld"
# define KMP_UINTPTR_SPEC "lu"
#endif
#ifdef KMP_I8
  typedef kmp_int64 kmp_int;
  typedef kmp_uint64 kmp_uint;
# define KMP_INT_SPEC KMP_INT64_SPEC
# define KMP_UINT_SPEC KMP_UINT64_SPEC
# define KMP_INT_MAX ((kmp_int64)0x7FFFFFFFFFFFFFFFLL)
# define KMP_INT_MIN ((kmp_int64)0x8000000000000000LL)
#else
  typedef kmp_int32 kmp_int;
  typedef kmp_uint32 kmp_uint;
# define KMP_INT_SPEC KMP_INT32_SPEC
# define KMP_UINT_SPEC KMP_UINT32_SPEC
# define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
# define KMP_INT_MIN ((kmp_int32)0x80000000)
#endif /* KMP_I8 */
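/* The *_SPEC macros are printf conversion suffixes without the '%', so
   callers splice them into format strings via literal concatenation, e.g.:
       kmp_int n = ...;
       printf( "nthreads=%" KMP_INT_SPEC "\n", n );
*/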
#ifdef __cplusplus
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template< typename T >
struct traits_t {
    typedef T signed_t;
    typedef T unsigned_t;
    typedef T floating_t;
    static char const * spec;
};
// int
template<>
struct traits_t< signed int > {
    typedef signed int signed_t;
    typedef unsigned int unsigned_t;
    typedef double floating_t;
    static char const * spec;
};
// unsigned int
template<>
struct traits_t< unsigned int > {
    typedef signed int signed_t;
    typedef unsigned int unsigned_t;
    typedef double floating_t;
    static char const * spec;
};
// long long
template<>
struct traits_t< signed long long > {
    typedef signed long long signed_t;
    typedef unsigned long long unsigned_t;
    typedef long double floating_t;
    static char const * spec;
};
// unsigned long long
template<>
struct traits_t< unsigned long long > {
    typedef signed long long signed_t;
    typedef unsigned long long unsigned_t;
    typedef long double floating_t;
    static char const * spec;
};
#endif // __cplusplus
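/* traits_t gives templated code the signed/unsigned/floating flavor of an
   integer type plus its printf conversion letters (spec carries no '%').
   A minimal usage sketch:
       template< typename T >
       void print_it( T v ) {
           char fmt[8];
           sprintf( fmt, "%%%s\n", traits_t< T >::spec );
           printf( fmt, v );
       }
   (The spec strings themselves are defined out of line.) */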
#define KMP_EXPORT extern /* export declaration in guide libraries */
#if __GNUC__ >= 4
    #define __forceinline __inline
#endif
#define PAGE_SIZE (0x4000)
#define PAGE_ALIGNED(_addr) ( ! ((size_t) _addr & \
                                 (size_t)(PAGE_SIZE - 1)))
#define ALIGN_TO_PAGE(x) (void *)(((size_t)(x)) & ~((size_t)(PAGE_SIZE - 1)))
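/* PAGE_ALIGNED tests the low bits: an address is page aligned iff
   (addr & (PAGE_SIZE - 1)) == 0. ALIGN_TO_PAGE rounds an address down to the
   start of its page by clearing those bits; with the 16K pages above,
   ALIGN_TO_PAGE(0x12345678) == (void *)0x12344000. */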
/* ---------------------- Support for cache alignment, padding, locking support */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */
/* Define the default size of the cache line */
#ifndef CACHE_LINE
    #define CACHE_LINE 128 /* cache line size in bytes */
#else
    #if ( CACHE_LINE < 64 ) && ! defined( KMP_OS_DARWIN )
        #warning CACHE_LINE is too small.
    #endif
#endif /* CACHE_LINE */
#define KMP_CACHE_PREFETCH(ADDR) /* nothing */
#if KMP_OS_UNIX && defined(__GNUC__)
# define KMP_DO_ALIGN(bytes) __attribute__((aligned(bytes)))
# define KMP_ALIGN_CACHE __attribute__((aligned(CACHE_LINE)))
# define KMP_ALIGN_CACHE_INTERNODE __attribute__((aligned(INTERNODE_CACHE_LINE)))
# define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#else
# define KMP_DO_ALIGN(bytes) __declspec( align(bytes) )
# define KMP_ALIGN_CACHE __declspec( align(CACHE_LINE) )
# define KMP_ALIGN_CACHE_INTERNODE __declspec( align(INTERNODE_CACHE_LINE) )
# define KMP_ALIGN(bytes) __declspec( align(bytes) )
#endif
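/* Typical use is to pad hot shared data out to a cache line so that
   neighboring objects do not false-share, e.g. (a hypothetical struct;
   the real users live in kmp.h):
       typedef struct KMP_ALIGN_CACHE kmp_counter {
           volatile kmp_int32 value;
       } kmp_counter_t;
*/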
/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
    kmp_no_fence,       // No memory fence
    kmp_acquire_fence,  // Acquire (read) memory fence
    kmp_release_fence,  // Release (write) memory fence
    kmp_full_fence      // Full (read+write) memory fence
};
//
// Synchronization primitives
//

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)

# define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd( (volatile long *)(p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), 1 )
# define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd( (volatile long *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), 4 )
# define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_ADD32(p, v) InterlockedExchangeAdd( (volatile long *)(p), (v) )
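/* InterlockedExchangeAdd returns the value the target held *before* the add,
   which is what the "test then" names promise: KMP_TEST_THEN_INC32(&x)
   bumps x and yields its old value. */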
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) InterlockedCompareExchange( (volatile long *)(p),(long)(sv),(long)(cv) )

# define KMP_XCHG_FIXED32(p, v) InterlockedExchange( (volatile long *)(p), (long)(v) )
# define KMP_XCHG_FIXED64(p, v) InterlockedExchange64( (volatile kmp_int64 *)(p), (kmp_int64)(v) )
inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v )
{
    kmp_int32 tmp = InterlockedExchange( (volatile long *)p, *(long *)&v );
    return *(kmp_real32*)&tmp;
}
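/* No interlocked primitive takes a float, so the 32-bit pattern is punned
   through a long exchange and punned back; only the bit pattern matters. */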
extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8 __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8 __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8 __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16 __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32 __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
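/* None of the __kmp_* helpers above are defined in this header; they are
   implemented elsewhere in the runtime (typically in assembly), and the
   macros below simply route through them. */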
# define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64( (p), -1LL )

# define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
# define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8( (p), (v) )
# define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8( (p), (v) )
# define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
# if KMP_ARCH_X86
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
# else /* 64 bit pointers */
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
# endif /* KMP_ARCH_X86 */
# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __kmp_compare_and_store_ret64( (p), (cv), (sv) )
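/* Naming convention: KMP_COMPARE_AND_STORE_{ACQ,REL}* return nonzero iff the
   swap happened, while the _RET variants return the value actually observed
   at *p, letting callers retry with the freshly read value. */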
# define KMP_XCHG_FIXED8(p, v) __kmp_xchg_fixed8( (volatile kmp_int8*)(p), (kmp_int8)(v) );
# define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16( (p), (v) );
# define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64( (p), (v) );

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
# define KMP_TEST_THEN_INC32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_DEC32(p) __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC_ACQ32(p) __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC64(p) __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p) __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD8(p, v) __sync_fetch_and_add( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_ADD32(p, v) __sync_fetch_and_add( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_ADD64(p, v) __sync_fetch_and_add( (kmp_int64 *)(p), (v) )

# define KMP_TEST_THEN_OR8(p, v) __sync_fetch_and_or( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_AND8(p, v) __sync_fetch_and_and( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_OR32(p, v) __sync_fetch_and_or( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_AND32(p, v) __sync_fetch_and_and( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_OR64(p, v) __sync_fetch_and_or( (kmp_int64 *)(p), (v) )
# define KMP_TEST_THEN_AND64(p, v) __sync_fetch_and_and( (kmp_int64 *)(p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p),(kmp_uint8)(cv),(kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p),(kmp_uint8)(cv),(kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p),(kmp_uint16)(cv),(kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p),(kmp_uint16)(cv),(kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p),(kmp_uint32)(cv),(kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p),(kmp_uint32)(cv),(kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p),(kmp_uint64)(cv),(kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p),(kmp_uint64)(cv),(kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __sync_bool_compare_and_swap( (volatile void **)(p),(void *)(cv),(void *)(sv) )

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint8 *)(p),(kmp_uint8)(cv),(kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint16 *)(p),(kmp_uint16)(cv),(kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint32 *)(p),(kmp_uint32)(cv),(kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint64 *)(p),(kmp_uint64)(cv),(kmp_uint64)(sv) )

#define KMP_XCHG_FIXED8(p, v) __sync_lock_test_and_set( (volatile kmp_uint8 *)(p), (kmp_uint8)(v) )
#define KMP_XCHG_FIXED16(p, v) __sync_lock_test_and_set( (volatile kmp_uint16 *)(p), (kmp_uint16)(v) )
#define KMP_XCHG_FIXED32(p, v) __sync_lock_test_and_set( (volatile kmp_uint32 *)(p), (kmp_uint32)(v) )
#define KMP_XCHG_FIXED64(p, v) __sync_lock_test_and_set( (volatile kmp_uint64 *)(p), (kmp_uint64)(v) )
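/* GCC documents __sync_lock_test_and_set as an acquire barrier only, but on
   x86 it compiles to a locked xchg, which is a full barrier; architectures
   that need stronger ordering get their fences from KMP_MB() below. */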
inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v )
{
    kmp_int32 tmp = __sync_lock_test_and_set( (kmp_int32*)p, *(kmp_int32*)&v );
    return *(kmp_real32*)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64( volatile kmp_real64 *p, kmp_real64 v )
{
    kmp_int64 tmp = __sync_lock_test_and_set( (kmp_int64*)p, *(kmp_int64*)&v );
    return *(kmp_real64*)&tmp;
}
extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8 __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8 __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8 __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16 __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32 __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
# define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
# define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32( (p), (v) )
# define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8( (p), (v) )
# define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8( (p), (v) )
# define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
# if KMP_ARCH_X86
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
# else /* 64 bit pointers */
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
# endif /* KMP_ARCH_X86 */
# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __kmp_compare_and_store_ret32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __kmp_compare_and_store_ret64( (p), (cv), (sv) )

# define KMP_XCHG_FIXED8(p, v) __kmp_xchg_fixed8( (volatile kmp_int8*)(p), (kmp_int8)(v) );
# define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16( (p), (v) );
# define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32( (p), (v) );
# define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64( (p), (v) );
# define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32( (p), (v) );
# define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64( (p), (v) );
#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
# ifdef __ABSOFT_WIN
#   define KMP_MB()     asm ("nop")
#   define KMP_IMB()    asm ("nop")
# endif
#endif /* KMP_OS_WINDOWS */
#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64
# define KMP_MB() __sync_synchronize()
#endif
#ifndef KMP_ST_REL32
# define KMP_ST_REL32(A,D) ( *(A) = (D) )
#endif

#ifndef KMP_ST_REL64
# define KMP_ST_REL64(A,D) ( *(A) = (D) )
#endif

#ifndef KMP_LD_ACQ32
# define KMP_LD_ACQ32(A) ( *(A) )
#endif

#ifndef KMP_LD_ACQ64
# define KMP_LD_ACQ64(A) ( *(A) )
#endif
#define TCR_1(a) (a)
#define TCW_1(a,b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a,b) (a) = (b)
#define TCR_8(a) (a)
#define TCW_8(a,b) (a) = (b)
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a,b) (a) = (b)
#define TCX_SYNC_4(a,b,c) KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a,b) (a) = (b)
#define TCX_SYNC_8(a,b,c) KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), (kmp_int64)(b), (kmp_int64)(c))
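/* The TCR_/TCW_ macros wrap reads and writes of shared variables. The plain
   forms compile to raw accesses and rely on aligned 1/4/8-byte loads and
   stores being naturally atomic; TCX_SYNC_* performs the update as a
   compare-and-store, so it can be used to claim a flag, e.g. (a sketch):
       while ( ! TCX_SYNC_4( flag, 0, 1 ) );  // spin until we flip 0 -> 1
*/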
#if KMP_ARCH_X86
  #define TCR_PTR(a) ((void *)TCR_4(a))
  #define TCW_PTR(a,b) TCW_4((a),(b))
  #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
  #define TCW_SYNC_PTR(a,b) TCW_SYNC_4((a),(b))
  #define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_4((a),(b),(c)))
#else /* 64 bit pointers */
  #define TCR_PTR(a) ((void *)TCR_8(a))
  #define TCW_PTR(a,b) TCW_8((a),(b))
  #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
  #define TCW_SYNC_PTR(a,b) TCW_SYNC_8((a),(b))
  #define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_8((a),(b),(c)))
#endif /* KMP_ARCH_X86 */
#ifndef FTN_TRUE
# define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
# define FTN_FALSE FALSE
#endif
typedef void (*microtask_t)( int *gtid, int *npr, ... );
#ifdef USE_VOLATILE_CAST
# define VOLATILE_CAST(x) (volatile x)
#else
# define VOLATILE_CAST(x) (x)
#endif
#ifdef KMP_I8
# define KMP_WAIT_YIELD __kmp_wait_yield_8
# define KMP_EQ __kmp_eq_8
# define KMP_NEQ __kmp_neq_8
# define KMP_LT __kmp_lt_8
# define KMP_GE __kmp_ge_8
# define KMP_LE __kmp_le_8
#else
# define KMP_WAIT_YIELD __kmp_wait_yield_4
# define KMP_EQ __kmp_eq_4
# define KMP_NEQ __kmp_neq_4
# define KMP_LT __kmp_lt_4
# define KMP_GE __kmp_ge_4
# define KMP_LE __kmp_le_4
#endif /* KMP_I8 */
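/* Selects the 4- or 8-byte flavor of the spin-wait helper and its comparison
   predicates so they match the width of kmp_int chosen above (KMP_I8 builds
   use the 64-bit kmp_int). */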
/* Workaround for Intel(R) 64 code gen bug when taking address of static array */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
# define STATIC_EFI2_WORKAROUND
#else
# define STATIC_EFI2_WORKAROUND static
#endif
// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif
#ifndef USE_SYSFS_INFO
# define USE_SYSFS_INFO 0
#endif
#ifndef USE_CMPXCHG_FIX
# define USE_CMPXCHG_FIX 1
#endif
// Enable dynamic user lock
#ifndef KMP_USE_DYNAMIC_LOCK
# define KMP_USE_DYNAMIC_LOCK 0
#endif
// Warning levels
enum kmp_warnings_level {
    kmp_warnings_off = 0,       /* No warnings */
    kmp_warnings_low,           /* Minimal warnings (default) */
    kmp_warnings_explicit = 6,  /* Explicitly set to ON - more warnings */
    kmp_warnings_verbose        /* reserved */
};
#include "kmp_safe_c_api.h"
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus