Changes in kernel/arch/ia64/include/atomic.h [7a0359b:7038f55] in mainline
File: 1 edited
Legend: unmodified lines carry no prefix, added lines are prefixed with '+', removed lines with '-', and '…' marks runs of unmodified lines that are not shown.
kernel/arch/ia64/include/atomic.h
--- r7a0359b
+++ r7038f55

  */

- /** @addtogroup ia64
+ /** @addtogroup ia64
  * @{
  */
 …
 #define KERN_ia64_ATOMIC_H_

-#include <trace.h>
+/** Atomic addition.
+ *
+ * @param val Atomic value.
+ * @param imm Value to add.
+ *
+ * @return Value before addition.
+ */
+static inline long atomic_add(atomic_t *val, int imm)
+{
+	long v;

-NO_TRACE static inline atomic_count_t test_and_set(atomic_t *val)
+	asm volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v),
+	    "+m" (val->count) : "i" (imm));
+
+	return v;
+}
+
+static inline uint64_t test_and_set(atomic_t *val)
 {
-	atomic_count_t v;
-
+	uint64_t v;
+
 	asm volatile (
-		"movl %[v] = 0x1;;\n"
-		"xchg8 %[v] = %[count], %[v];;\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
+		"movl %0 = 0x1;;\n"
+		"xchg8 %0 = %1, %0;;\n"
+		: "=r" (v), "+m" (val->count)
 	);
 …
 }

-NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
+static inline void atomic_lock_arch(atomic_t *val)
 {
 	do {
-		while (val->count);
+		while (val->count)
+			;
 	} while (test_and_set(val));
 }

-NO_TRACE static inline void atomic_inc(atomic_t *val)
+static inline void atomic_inc(atomic_t *val)
 {
-	atomic_count_t v;
-
-	asm volatile (
-		"fetchadd8.rel %[v] = %[count], 1\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
-	);
+	atomic_add(val, 1);
 }

-NO_TRACE static inline void atomic_dec(atomic_t *val)
+static inline void atomic_dec(atomic_t *val)
 {
-	atomic_count_t v;
-
-	asm volatile (
-		"fetchadd8.rel %[v] = %[count], -1\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
-	);
+	atomic_add(val, -1);
 }

-NO_TRACE static inline atomic_count_t atomic_preinc(atomic_t *val)
+static inline long atomic_preinc(atomic_t *val)
 {
-	atomic_count_t v;
-
-	asm volatile (
-		"fetchadd8.rel %[v] = %[count], 1\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
-	);
-
-	return (v + 1);
+	return atomic_add(val, 1) + 1;
 }

-NO_TRACE static inline atomic_count_t atomic_predec(atomic_t *val)
+static inline long atomic_predec(atomic_t *val)
 {
-	atomic_count_t v;
-
-	asm volatile (
-		"fetchadd8.rel %[v] = %[count], -1\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
-	);
-
-	return (v - 1);
+	return atomic_add(val, -1) - 1;
 }

-NO_TRACE static inline atomic_count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
-	atomic_count_t v;
-
-	asm volatile (
-		"fetchadd8.rel %[v] = %[count], 1\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
-	);
-
-	return v;
+	return atomic_add(val, 1);
 }

-NO_TRACE static inline atomic_count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
-	atomic_count_t v;
-
-	asm volatile (
-		"fetchadd8.rel %[v] = %[count], -1\n"
-		: [v] "=r" (v),
-		  [count] "+m" (val->count)
-	);
-
-	return v;
+	return atomic_add(val, -1);
 }
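The net effect of the change is that the per-function fetchadd8.rel assembly is folded into a single atomic_add() primitive that returns the value held before the addition; atomic_inc(), atomic_dec() and the pre/post increment and decrement helpers become thin wrappers around it, the NO_TRACE annotations are dropped, and the atomic_count_t type is replaced with plain long and uint64_t. The sketch below is not part of the changeset; it only illustrates the call semantics of the reworked API, and it assumes the generic kernel atomic_set() helper and an atomic_t with a 64-bit count field, as used elsewhere in the kernel tree (the function name atomic_api_sketch is made up for illustration).

/* Illustrative only: exercises the ia64 atomic helpers after the change. */
static void atomic_api_sketch(void)
{
	atomic_t counter;

	/* atomic_set() is assumed from the generic <atomic.h>, not this file. */
	atomic_set(&counter, 10);

	long before = atomic_postinc(&counter);  /* returns 10, counter is now 11 */
	long after = atomic_preinc(&counter);    /* returns 12, counter is now 12 */

	atomic_dec(&counter);                    /* counter is back to 11 */

	(void) before;
	(void) after;
}

Note that atomic_add() passes its increment through the "i" (immediate) constraint, and fetchadd8 on ia64 only encodes a small set of immediates (-16, -8, -4, -1, 1, 4, 8, 16), so the wrappers above stay within what the instruction accepts by using only +1 and -1.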