#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H
#include <stdint.h>
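
/* Count trailing zeros via the classic de Bruijn multiply trick:
 * x&-x isolates the lowest set bit, multiplying by the de Bruijn
 * constant 0x076be629 places a unique 5-bit pattern in the top bits,
 * and the table maps that pattern back to the bit index. */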
static inline int a_ctz_l(unsigned long x)
{
	static const char debruijn32[32] = {
		0, 1, 23, 2, 29, 24, 19, 3, 30, 27, 25, 11, 20, 8, 4, 13,
		31, 22, 28, 18, 26, 10, 7, 12, 21, 17, 9, 6, 16, 5, 15, 14
	};
	return debruijn32[(x&-x)*0x076be629 >> 27];
}
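
/* 64-bit ctz built from the 32-bit version: scan the low half first,
 * and only fall back to the high half when the low half is zero. */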
static inline int a_ctz_64(uint64_t x)
{
	uint32_t y = x;
	if (!y) {
		y = x>>32;
		return 32 + a_ctz_l(y);
	}
	return a_ctz_l(y);
}
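
/* On ARMv6 and later the compare-and-swap primitive can be inlined
 * using ldrex/strex; on older cores it falls back to the kernel's
 * cmpxchg user helper (see the #else branch below). */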
#if __ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__ \
 || __ARM_ARCH_7A__ || __ARM_ARCH_7R__ \
 || __ARM_ARCH >= 7
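
/* ARMv7 provides the dmb instruction; ARMv6 uses the equivalent
 * cp15 data memory barrier operation. */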
#if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
#define MEM_BARRIER "dmb ish"
#else
#define MEM_BARRIER "mcr p15,0,r0,c7,c10,5"
#endif
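
/* Inline CAS with the same calling convention as the kernel helper:
 * returns 0 if *p equaled t and was replaced by s, nonzero otherwise;
 * retries internally if the strex fails due to contention. Full
 * barriers are issued before and after the operation. */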
static inline int __k_cas(int t, int s, volatile int *p)
{
	int ret;
	__asm__(
		" " MEM_BARRIER "\n"
		"1: ldrex %0,%3\n"
		" subs %0,%0,%1\n"
		" strexeq %0,%2,%3\n"
		" teqeq %0,#1\n"
		" beq 1b\n"
		" " MEM_BARRIER "\n"
		: "=&r"(ret)
		: "r"(t), "r"(s), "Q"(*p)
		: "memory", "cc" );
	return ret;
}
#else
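/* Pre-ARMv6: use the Linux kernel's cmpxchg user helper in the vector
 * page (0xffff0fc0); same return convention as the inline version. */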
#define __k_cas ((int (*)(int, int, volatile int *))0xffff0fc0)
#endif
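
/* a_cas returns the old value of *p: t if the swap happened, or the
 * conflicting value observed when it did not. */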
static inline int a_cas(volatile int *p, int t, int s)
{
	int old;
	for (;;) {
		if (!__k_cas(t, s, p))
			return t;
		if ((old=*p) != t)
			return old;
	}
}
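
/* Pointer- and long-sized CAS just wrap the int version, since
 * pointers and longs are 32-bit on this target. */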
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	return (void *)a_cas(p, (int)t, (int)s);
}

static inline long a_cas_l(volatile void *p, long t, long s)
{
	return a_cas(p, t, s);
}
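
/* Atomic exchange implemented as a CAS retry loop. */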
static inline int a_swap(volatile int *x, int v)
{
	int old;
	do old = *x;
	while (__k_cas(old, v, x));
	return old;
}
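
/* Atomic fetch-and-add (and the inc/dec wrappers) via a CAS retry
 * loop; returns the value *x held before the addition. */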
static inline int a_fetch_add(volatile int *x, int v)
{
	int old;
	do old = *x;
	while (__k_cas(old, old+v, x));
	return old;
}

static inline void a_inc(volatile int *x)
{
	a_fetch_add(x, 1);
}

static inline void a_dec(volatile int *x)
{
	a_fetch_add(x, -1);
}
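
/* Atomic store with full barrier semantics, implemented by CAS-ing
 * from whatever value is currently observed to the new value. */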
static inline void a_store(volatile int *p, int x)
{
	while (__k_cas(*p, x, p));
}
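
/* Spin-wait hint; a no-op on this arch. */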
static inline void a_spin()
{
}
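
/* Force a fatal fault by writing through a null pointer. */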
static inline void a_crash()
{
	*(volatile char *)0=0;
}
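
/* Atomic bitwise and/or via CAS retry loops. */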
static inline void a_and(volatile int *p, int v)
{
	int old;
	do old = *p;
	while (__k_cas(old, old&v, p));
}

static inline void a_or(volatile int *p, int v)
{
	int old;
	do old = *p;
	while (__k_cas(old, old|v, p));
}
static inline void a_or_l(volatile void *p, long v)
{
	a_or(p, v);
}
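
/* 64-bit mask operations are performed one 32-bit half at a time:
 * each half is updated atomically, but the two updates together are
 * not a single atomic 64-bit operation. */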
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_and((int *)p, u.r[0]);
	a_and((int *)p+1, u.r[1]);
}

static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_or((int *)p, u.r[0]);
	a_or((int *)p+1, u.r[1]);
}
#endif