intrinsics.c (1249B)
#define FORCE_INLINE inline __attribute__((always_inline))

#define atomic_and(ptr, n) __atomic_and_fetch(ptr, n, __ATOMIC_RELEASE)
#define atomic_fetch_add(ptr, n) __atomic_fetch_add(ptr, n, __ATOMIC_RELEASE)
#define atomic_inc(ptr, n) __atomic_fetch_add(ptr, n, __ATOMIC_ACQ_REL)
#define atomic_load(ptr) __atomic_load_n(ptr, __ATOMIC_ACQUIRE)
#define atomic_exchange_n(ptr, val) __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST)
#define atomic_cas(ptr, cptr, n) __atomic_compare_exchange_n(ptr, cptr, n, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)

/* count leading zeros; __builtin_clz(0) is undefined, so 0 is handled explicitly */
function FORCE_INLINE u32
clz_u32(u32 a)
{
    u32 result = 32;
    if (a) result = __builtin_clz(a);
    return result;
}

/* count trailing zeros; __builtin_ctz(0) is undefined, so 0 is handled explicitly */
function FORCE_INLINE u32
ctz_u32(u32 a)
{
    u32 result = 32;
    if (a) result = __builtin_ctz(a);
    return result;
}

#ifdef __ARM_ARCH_ISA_A64
/* TODO? debuggers just loop here forever and need a manual PC increment (jump +1 in gdb) */
#define debugbreak() asm volatile ("brk 0xf000")

/* read the virtual counter as the AArch64 stand-in for the x86 TSC */
function FORCE_INLINE u64
rdtsc(void)
{
    u64 cntvct;
    asm volatile ("mrs %0, cntvct_el0" : "=r"(cntvct));
    return cntvct;
}

#elif defined(__x86_64__)
#include <immintrin.h>

#define debugbreak() asm volatile ("int3; nop")

#define rdtsc() __rdtsc()

#endif
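A minimal usage sketch, not part of intrinsics.c: the u32/u64 typedefs and the `function` macro come from the surrounding codebase, so they are stubbed here (hypothetically, as plain stdint typedefs and `static`) to make the snippet stand alone. It exercises the bit-scan helpers, a release-ordered atomic increment, and rdtsc().

/* Hypothetical standalone harness: stub the codebase's types/macros,
 * then include the file above. */
#include <stdint.h>
typedef uint32_t u32;
typedef uint64_t u64;
#define function static

#include "intrinsics.c"

static u64 call_count;

function void
intrinsics_demo(void)
{
    u64 start = rdtsc();              /* virtual counter on AArch64, TSC on x86 */
    u32 lead  = clz_u32(0x00F0u);     /* 24: highest set bit is bit 7 */
    u32 trail = ctz_u32(0x00F0u);     /* 4: lowest set bit is bit 4 */
    u32 none  = ctz_u32(0);           /* 32: the zero case is defined here, unlike the builtin */
    atomic_fetch_add(&call_count, 1); /* release-ordered increment of a shared counter */
    u64 elapsed = rdtsc() - start;
    (void)lead; (void)trail; (void)none; (void)elapsed;
}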