Changes in kernel/arch/amd64/include/atomic.h [ba371e1:c00589d] in mainline
- Files: kernel/arch/amd64/include/atomic.h (1 edited)
Legend:
- unchanged lines have no prefix
- added lines (c00589d) are prefixed with +
- removed lines (ba371e1) are prefixed with -
kernel/arch/amd64/include/atomic.h
--- ba371e1
+++ c00589d

 #include <preemption.h>

-static inline void atomic_inc(atomic_t *val)
-{
+static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
 	asm volatile (
…
 }

-static inline void atomic_dec(atomic_t *val)
-{
+static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
 	asm volatile (
…
 }

-static inline atomic_count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
-	atomic_count_t r = 1;
+	long r = 1;

 	asm volatile (
 		"lock xaddq %[r], %[count]\n"
-		: [count] "+m" (val->count),
-		  [r] "+r" (r)
+		: [count] "+m" (val->count), [r] "+r" (r)
 	);
…
 }

-static inline atomic_count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
-	atomic_count_t r = -1;
+	long r = -1;

 	asm volatile (
 		"lock xaddq %[r], %[count]\n"
-		: [count] "+m" (val->count),
-		  [r] "+r" (r)
+		: [count] "+m" (val->count), [r] "+r" (r)
 	);
…
 #define atomic_predec(val) (atomic_postdec(val) - 1)

-static inline atomic_count_t test_and_set(atomic_t *val)
-{
-	atomic_count_t v = 1;
+static inline uint64_t test_and_set(atomic_t *val) {
+	uint64_t v;

 	asm volatile (
+		"movq $1, %[v]\n"
 		"xchgq %[v], %[count]\n"
-		: [v] "+r" (v),
-		  [count] "+m" (val->count)
+		: [v] "=r" (v), [count] "+m" (val->count)
 	);
…
 }

+
 /** amd64 specific fast spinlock */
 static inline void atomic_lock_arch(atomic_t *val)
 {
-	atomic_count_t tmp;
+	uint64_t tmp;

 	preemption_disable();
…
 		"testq %[tmp], %[tmp]\n"
 		"jnz 0b\n"
-		: [count] "+m" (val->count),
-		  [tmp] "=&r" (tmp)
+		: [count] "+m" (val->count), [tmp] "=&r" (tmp)
 	);
-
 	/*
 	 * Prevent critical section code from bleeding out this way up.
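For readers comparing the two variants outside the kernel tree, the following is a minimal, self-contained sketch (not part of this changeset) using GCC's __atomic builtins: __atomic_fetch_add has the same observable semantics as the "lock xaddq" sequence in atomic_postinc/atomic_postdec, and __atomic_exchange_n matches the "xchgq" used by test_and_set. The fake_atomic_t type and the fake_* names are hypothetical, introduced only for illustration; they are not HelenOS API.

/*
 * Sketch only, not part of the changeset: portable equivalents of the
 * atomic primitives touched by this diff, built on GCC __atomic builtins.
 */
#include <stdio.h>

typedef struct {
	volatile long count;   /* stands in for atomic_t's counter field */
} fake_atomic_t;

/* Same effect as atomic_postinc(): atomically add 1, return the old value. */
static inline long fake_postinc(fake_atomic_t *val)
{
	return __atomic_fetch_add(&val->count, 1, __ATOMIC_SEQ_CST);
}

/* Same effect as test_and_set(): atomically store 1, return the old value. */
static inline long fake_test_and_set(fake_atomic_t *val)
{
	return __atomic_exchange_n(&val->count, 1, __ATOMIC_SEQ_CST);
}

int main(void)
{
	fake_atomic_t a = { .count = 41 };

	long before = fake_postinc(&a);      /* before == 41, a.count == 42 */
	long old = fake_test_and_set(&a);    /* old == 42, a.count == 1 */

	printf("postinc saw %ld, test_and_set saw %ld, count is now %ld\n",
	    before, old, (long) a.count);
	return 0;
}

The builtins return the value the counter held before the operation, which is exactly what the xadd/xchg instructions leave in the output register in the kernel code above.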