/*
 * Copyright (C) 2007 Atmel Corporation
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License.  See the file "COPYING.LIB" in the main directory of this
 * archive for more details.
 */

#ifndef _AVR32_BITS_ATOMIC_H
#define _AVR32_BITS_ATOMIC_H	1

#include <inttypes.h>

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;

/* 8- and 16-bit compare-and-exchange are not supported; abort at runtime.  */
#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
	(abort(), 0)

#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
	(abort(), 0)

/* 32-bit compare-and-exchange.  The ssrf 5 / ld.w / stcond sequence is a
   load-locked/store-conditional loop: stcond only performs the store if the
   lock flag set by "ssrf 5" is still set (it is cleared by interrupts and
   exceptions), otherwise the loop retries.  Returns the previous value of
   *mem.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)	\
	({								\
		__typeof__(*(mem)) __prev;				\
		__asm__ __volatile__(					\
			"/* __arch_compare_and_exchange_val_32_acq */\n" \
			"1:	ssrf	5\n"				\
			"	ld.w	%[result], %[m]\n"		\
			"	cp.w	%[result], %[old]\n"		\
			"	brne	2f\n"				\
			"	stcond	%[m], %[new]\n"			\
			"	brne	1b\n"				\
			"2:"						\
			: [result] "=&r"(__prev), [m] "=m"(*(mem))	\
			: "m"(*(mem)), [old] "ir"(oldval),		\
			  [new] "r"(newval)				\
			: "memory", "cc");				\
		__prev;							\
	})

#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
	(abort(), 0)

/* Atomically replace *mem with newval using the single xchg instruction;
   returns the previous value.  */
#define __arch_exchange_32_acq(mem, newval)				\
	({								\
		__typeof__(*(mem)) __oldval;				\
		__asm__ __volatile__(					\
			"/* __arch_exchange_32_acq */\n"		\
			"	xchg	%[old], %[m], %[new]"		\
			: [old] "=&r"(__oldval)				\
			: [m] "r"(mem), [new] "r"(newval)		\
			: "memory");					\
		__oldval;						\
	})

/* Atomically add value to *mem; returns the old value.  */
#define __arch_atomic_exchange_and_add_32(mem, value)			\
	({								\
		__typeof__(*(mem)) __oldval, __tmp;			\
		__asm__ __volatile__(					\
			"/* __arch_atomic_exchange_and_add_32 */\n"	\
			"1:	ssrf	5\n"				\
			"	ld.w	%[old], %[m]\n"			\
			"	add	%[tmp], %[old], %[val]\n"	\
			"	stcond	%[m], %[tmp]\n"			\
			"	brne	1b"				\
			: [old] "=&r"(__oldval), [tmp] "=&r"(__tmp),	\
			  [m] "=m"(*(mem))				\
			: "m"(*(mem)), [val] "r"(value)			\
			: "memory", "cc");				\
		__oldval;						\
	})

/* Atomically decrement *mem if its value is positive; returns the old
   value in either case.  */
#define __arch_atomic_decrement_if_positive_32(mem)			\
	({								\
		__typeof__(*(mem)) __oldval, __tmp;			\
		__asm__ __volatile__(					\
			"/* __arch_atomic_decrement_if_positive_32 */\n" \
			"1:	ssrf	5\n"				\
			"	ld.w	%[old], %[m]\n"			\
			"	sub	%[tmp], %[old], 1\n"		\
			"	brlt	2f\n"				\
			"	stcond	%[m], %[tmp]\n"			\
			"	brne	1b\n"				\
			"2:"						\
			: [old] "=&r"(__oldval), [tmp] "=&r"(__tmp),	\
			  [m] "=m"(*(mem))				\
			: "m"(*(mem))					\
			: "memory", "cc");				\
		__oldval;						\
	})

/* Only 32-bit operands are supported; any other size aborts.  */
#define atomic_exchange_acq(mem, newval)				\
	({								\
		if (sizeof(*(mem)) != 4)				\
			abort();					\
		__arch_exchange_32_acq(mem, newval);			\
	})

#define atomic_exchange_and_add(mem, newval)				\
	({								\
		if (sizeof(*(mem)) != 4)				\
			abort();					\
		__arch_atomic_exchange_and_add_32(mem, newval);		\
	})

#define atomic_decrement_if_positive(mem)				\
	({								\
		if (sizeof(*(mem)) != 4)				\
			abort();					\
		__arch_atomic_decrement_if_positive_32(mem);		\
	})

#endif /* _AVR32_BITS_ATOMIC_H */
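
/*
 * Usage sketch (illustrative only, not part of the original header).  These
 * macros are libc-internal, so the snippet simply assumes they are visible
 * in the current translation unit; the names `counter' and `example' are
 * hypothetical.
 *
 *	static int32_t counter;
 *
 *	void example(void)
 *	{
 *		int32_t old;
 *
 *		old = atomic_exchange_and_add(&counter, 1);    // fetch-and-add
 *		old = atomic_exchange_acq(&counter, 0);        // swap in 0, get old value
 *		old = atomic_decrement_if_positive(&counter);  // decrement only if > 0
 *	}
 */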